1 #include "config.h"
2
3 #include "gskprivate.h"
4
5 #include "gskvulkanrenderprivate.h"
6
7 #include "gskrendererprivate.h"
8 #include "gskvulkanbufferprivate.h"
9 #include "gskvulkancommandpoolprivate.h"
10 #include "gskvulkanpipelineprivate.h"
11 #include "gskvulkanrenderpassprivate.h"
12
13 #include "gskvulkanblendmodepipelineprivate.h"
14 #include "gskvulkanblurpipelineprivate.h"
15 #include "gskvulkanborderpipelineprivate.h"
16 #include "gskvulkanboxshadowpipelineprivate.h"
17 #include "gskvulkancolorpipelineprivate.h"
18 #include "gskvulkancolortextpipelineprivate.h"
19 #include "gskvulkancrossfadepipelineprivate.h"
20 #include "gskvulkaneffectpipelineprivate.h"
21 #include "gskvulkanlineargradientpipelineprivate.h"
22 #include "gskvulkantextpipelineprivate.h"
23 #include "gskvulkantexturepipelineprivate.h"
24 #include "gskvulkanpushconstantsprivate.h"
25
26 #define DESCRIPTOR_POOL_MAXSETS 128
27 #define DESCRIPTOR_POOL_MAXSETS_INCREASE 128
28
/* Per-renderer state for drawing a frame with Vulkan. */
struct _GskVulkanRender
{
  GskRenderer *renderer;       /* renderer we work for (unowned back pointer) */
  GdkVulkanContext *vulkan;    /* context providing the VkDevice */

  int scale_factor;            /* surface scale factor, 1 when an explicit viewport is used */
  graphene_rect_t viewport;    /* area of the target being rendered, in pixels */
  cairo_region_t *clip;        /* clip region; always a single rectangle (see setup()) */

  GHashTable *framebuffers;    /* GskVulkanImage => HashFramebufferEntry cache */
  GskVulkanCommandPool *command_pool;
  VkFence fence;               /* signaled when the last submitted frame is done */
  VkRenderPass render_pass;
  VkDescriptorSetLayout descriptor_set_layout;
  VkPipelineLayout pipeline_layout[3]; /* indexed by number of textures */
  GskVulkanUploader *uploader;

  /* HashDescriptorSetIndexEntry set: maps (image, repeat) to a set index */
  GHashTable *descriptor_set_indexes;
  VkDescriptorPool descriptor_pool;
  uint32_t descriptor_pool_maxsets;    /* current capacity of descriptor_pool */
  VkDescriptorSet *descriptor_sets;    /* array of n_descriptor_sets sets */
  gsize n_descriptor_sets;
  GskVulkanPipeline *pipelines[GSK_VULKAN_N_PIPELINES]; /* lazily created, see get_pipeline() */

  GskVulkanImage *target;      /* image being rendered to (owned) */

  VkSampler sampler;           /* clamp-to-border sampler */
  VkSampler repeating_sampler; /* repeat sampler */

  GList *render_passes;        /* GskVulkanRenderPass, newest first (owned) */
  GSList *cleanup_images;      /* images to unref after the frame (owned) */

  GQuark render_pass_counter;  /* profiler counter, only set with G_ENABLE_DEBUG */
  GQuark gpu_time_timer;       /* profiler timer, only set with G_ENABLE_DEBUG */
};
64
65 static void
gsk_vulkan_render_setup(GskVulkanRender * self,GskVulkanImage * target,const graphene_rect_t * rect,const cairo_region_t * clip)66 gsk_vulkan_render_setup (GskVulkanRender *self,
67 GskVulkanImage *target,
68 const graphene_rect_t *rect,
69 const cairo_region_t *clip)
70 {
71 GdkSurface *window = gsk_renderer_get_surface (self->renderer);
72
73 self->target = g_object_ref (target);
74
75 if (rect)
76 {
77 self->viewport = *rect;
78 self->scale_factor = 1;
79 }
80 else
81 {
82 self->scale_factor = gdk_surface_get_scale_factor (gsk_renderer_get_surface (self->renderer));
83 self->viewport = GRAPHENE_RECT_INIT (0, 0,
84 gdk_surface_get_width (window) * self->scale_factor,
85 gdk_surface_get_height (window) * self->scale_factor);
86 }
87 if (clip)
88 {
89 cairo_rectangle_int_t extents;
90 cairo_region_get_extents (clip, &extents);
91 self->clip = cairo_region_create_rectangle (&(cairo_rectangle_int_t) {
92 extents.x, extents.y,
93 extents.width, extents.height
94 });
95 }
96 else
97 {
98 self->clip = cairo_region_create_rectangle (&(cairo_rectangle_int_t) {
99 0, 0,
100 gsk_vulkan_image_get_width (target),
101 gsk_vulkan_image_get_height (target)
102 });
103 }
104 }
105
106 static guint desc_set_index_hash (gconstpointer v);
107 static gboolean desc_set_index_equal (gconstpointer v1, gconstpointer v2);
108
/* Creates a new GskVulkanRender for @renderer using @context.
 *
 * Sets up all long-lived Vulkan objects: command pool, fence, descriptor
 * pool, render pass, descriptor set layout, one pipeline layout per
 * possible texture count (0..2), the two samplers and the uploader.
 * Per-frame state is configured later via gsk_vulkan_render_reset().
 */
GskVulkanRender *
gsk_vulkan_render_new (GskRenderer      *renderer,
                       GdkVulkanContext *context)
{
  GskVulkanRender *self;
  VkDevice device;

  self = g_slice_new0 (GskVulkanRender);

  self->vulkan = context;
  self->renderer = renderer;
  self->framebuffers = g_hash_table_new (g_direct_hash, g_direct_equal);
  /* entries are both key and value; freed with g_free on removal */
  self->descriptor_set_indexes = g_hash_table_new_full (desc_set_index_hash, desc_set_index_equal, NULL, g_free);

  device = gdk_vulkan_context_get_device (self->vulkan);

  self->command_pool = gsk_vulkan_command_pool_new (self->vulkan);
  /* created signaled so gsk_vulkan_render_is_busy() reports idle before
   * the first frame has been submitted */
  GSK_VK_CHECK (vkCreateFence, device,
                &(VkFenceCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
                    .flags = VK_FENCE_CREATE_SIGNALED_BIT
                },
                NULL,
                &self->fence);

  /* pool for combined image samplers; grown on demand in
   * gsk_vulkan_render_prepare_descriptor_sets() */
  self->descriptor_pool_maxsets = DESCRIPTOR_POOL_MAXSETS;
  GSK_VK_CHECK (vkCreateDescriptorPool, device,
                &(VkDescriptorPoolCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
                    .maxSets = self->descriptor_pool_maxsets,
                    .poolSizeCount = 1,
                    .pPoolSizes = (VkDescriptorPoolSize[1]) {
                        {
                            .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                            .descriptorCount = self->descriptor_pool_maxsets
                        }
                    }
                },
                NULL,
                &self->descriptor_pool);

  /* single-subpass render pass: clear on load, store on finish,
   * final layout ready for presentation */
  GSK_VK_CHECK (vkCreateRenderPass, gdk_vulkan_context_get_device (self->vulkan),
                &(VkRenderPassCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
                    .attachmentCount = 1,
                    .pAttachments = (VkAttachmentDescription[]) {
                       {
                          .format = gdk_vulkan_context_get_image_format (self->vulkan),
                          .samples = VK_SAMPLE_COUNT_1_BIT,
                          .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
                          .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
                          .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
                          .finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
                       }
                    },
                    .subpassCount = 1,
                    .pSubpasses = (VkSubpassDescription []) {
                       {
                          .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
                          .inputAttachmentCount = 0,
                          .colorAttachmentCount = 1,
                          .pColorAttachments = (VkAttachmentReference []) {
                             {
                                .attachment = 0,
                                .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
                             }
                          },
                          .pResolveAttachments = (VkAttachmentReference []) {
                             {
                                .attachment = VK_ATTACHMENT_UNUSED,
                                .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
                             }
                          },
                          .pDepthStencilAttachment = NULL,
                       }
                    },
                    .dependencyCount = 0
                },
                NULL,
                &self->render_pass);

  /* one combined image sampler at binding 0, fragment stage only */
  GSK_VK_CHECK (vkCreateDescriptorSetLayout, device,
                &(VkDescriptorSetLayoutCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
                    .bindingCount = 1,
                    .pBindings = (VkDescriptorSetLayoutBinding[1]) {
                        {
                            .binding = 0,
                            .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                            .descriptorCount = 1,
                            .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT
                        }
                    }
                },
                NULL,
                &self->descriptor_set_layout);

  /* pipeline_layout[i] carries i set layouts — i == number of textures
   * a pipeline samples (see the num_textures column in get_pipeline()) */
  for (guint i = 0; i < 3; i++)
    {
      VkDescriptorSetLayout layouts[3] = {
        self->descriptor_set_layout,
        self->descriptor_set_layout,
        self->descriptor_set_layout
      };

      GSK_VK_CHECK (vkCreatePipelineLayout, device,
                    &(VkPipelineLayoutCreateInfo) {
                        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
                        .setLayoutCount = i,
                        .pSetLayouts = layouts,
                        .pushConstantRangeCount = gsk_vulkan_push_constants_get_range_count (),
                        .pPushConstantRanges = gsk_vulkan_push_constants_get_ranges ()
                    },
                    NULL,
                    &self->pipeline_layout[i]);
    }

  /* default sampler: linear filtering, clamp to transparent border */
  GSK_VK_CHECK (vkCreateSampler, device,
                &(VkSamplerCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
                    .magFilter = VK_FILTER_LINEAR,
                    .minFilter = VK_FILTER_LINEAR,
                    .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
                    .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
                    .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
                    .borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
                    .unnormalizedCoordinates = VK_FALSE,
                    .maxAnisotropy = 1.0,
                },
                NULL,
                &self->sampler);

  /* sampler for repeated content: same filtering, repeat addressing */
  GSK_VK_CHECK (vkCreateSampler, device,
                &(VkSamplerCreateInfo) {
                    .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
                    .magFilter = VK_FILTER_LINEAR,
                    .minFilter = VK_FILTER_LINEAR,
                    .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
                    .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
                    .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
                    .borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
                    .unnormalizedCoordinates = VK_FALSE,
                    .maxAnisotropy = 1.0,
                },
                NULL,
                &self->repeating_sampler);

  self->uploader = gsk_vulkan_uploader_new (self->vulkan, self->command_pool);

#ifdef G_ENABLE_DEBUG
  self->render_pass_counter = g_quark_from_static_string ("render-passes");
  self->gpu_time_timer = g_quark_from_static_string ("gpu-time");
#endif

  return self;
}
265
/* Value stored in GskVulkanRender::framebuffers, caching the
 * VkFramebuffer created for a GskVulkanImage. */
typedef struct {
  VkFramebuffer framebuffer;
} HashFramebufferEntry;
269
270 static void
gsk_vulkan_render_remove_framebuffer_from_image(gpointer data,GObject * image)271 gsk_vulkan_render_remove_framebuffer_from_image (gpointer data,
272 GObject *image)
273 {
274 GskVulkanRender *self = data;
275 HashFramebufferEntry *fb;
276
277 fb = g_hash_table_lookup (self->framebuffers, image);
278 g_hash_table_remove (self->framebuffers, image);
279
280 vkDestroyFramebuffer (gdk_vulkan_context_get_device (self->vulkan),
281 fb->framebuffer,
282 NULL);
283
284 g_slice_free (HashFramebufferEntry, fb);
285 }
286
287 VkFramebuffer
gsk_vulkan_render_get_framebuffer(GskVulkanRender * self,GskVulkanImage * image)288 gsk_vulkan_render_get_framebuffer (GskVulkanRender *self,
289 GskVulkanImage *image)
290 {
291 HashFramebufferEntry *fb;
292
293 fb = g_hash_table_lookup (self->framebuffers, image);
294 if (fb)
295 return fb->framebuffer;
296
297 fb = g_slice_new0 (HashFramebufferEntry);
298 GSK_VK_CHECK (vkCreateFramebuffer, gdk_vulkan_context_get_device (self->vulkan),
299 &(VkFramebufferCreateInfo) {
300 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
301 .renderPass = self->render_pass,
302 .attachmentCount = 1,
303 .pAttachments = (VkImageView[1]) {
304 gsk_vulkan_image_get_image_view (image)
305 },
306 .width = gsk_vulkan_image_get_width (image),
307 .height = gsk_vulkan_image_get_height (image),
308 .layers = 1
309 },
310 NULL,
311 &fb->framebuffer);
312 g_hash_table_insert (self->framebuffers, image, fb);
313 g_object_weak_ref (G_OBJECT (image), gsk_vulkan_render_remove_framebuffer_from_image, self);
314
315 return fb->framebuffer;
316 }
317
318 void
gsk_vulkan_render_add_cleanup_image(GskVulkanRender * self,GskVulkanImage * image)319 gsk_vulkan_render_add_cleanup_image (GskVulkanRender *self,
320 GskVulkanImage *image)
321 {
322 self->cleanup_images = g_slist_prepend (self->cleanup_images, image);
323 }
324
325 void
gsk_vulkan_render_add_render_pass(GskVulkanRender * self,GskVulkanRenderPass * pass)326 gsk_vulkan_render_add_render_pass (GskVulkanRender *self,
327 GskVulkanRenderPass *pass)
328 {
329 self->render_passes = g_list_prepend (self->render_passes, pass);
330
331 #ifdef G_ENABLE_DEBUG
332 gsk_profiler_counter_inc (gsk_renderer_get_profiler (self->renderer), self->render_pass_counter);
333 #endif
334 }
335
336 void
gsk_vulkan_render_add_node(GskVulkanRender * self,GskRenderNode * node)337 gsk_vulkan_render_add_node (GskVulkanRender *self,
338 GskRenderNode *node)
339 {
340 GskVulkanRenderPass *pass;
341 graphene_matrix_t mv;
342
343 graphene_matrix_init_scale (&mv, self->scale_factor, self->scale_factor, 1.0);
344
345 pass = gsk_vulkan_render_pass_new (self->vulkan,
346 self->target,
347 self->scale_factor,
348 &mv,
349 &self->viewport,
350 self->clip,
351 VK_NULL_HANDLE);
352
353 gsk_vulkan_render_add_render_pass (self, pass);
354
355 gsk_vulkan_render_pass_add (pass, self, node);
356 }
357
358 void
gsk_vulkan_render_upload(GskVulkanRender * self)359 gsk_vulkan_render_upload (GskVulkanRender *self)
360 {
361 GList *l;
362
363 /* gsk_vulkan_render_pass_upload may call gsk_vulkan_render_add_node_for_texture,
364 * prepending new render passes to the list. Therefore, we walk the list from
365 * the end.
366 */
367 for (l = g_list_last (self->render_passes); l; l = l->prev)
368 {
369 GskVulkanRenderPass *pass = l->data;
370 gsk_vulkan_render_pass_upload (pass, self, self->uploader);
371 }
372
373 gsk_vulkan_uploader_upload (self->uploader);
374 }
375
/* Returns the pipeline for @type, creating and caching it on first use.
 *
 * The table below must stay index-aligned with the GskVulkanPipelineType
 * enum; the name column selects the shader resources and num_textures
 * selects the matching pipeline_layout[].
 */
GskVulkanPipeline *
gsk_vulkan_render_get_pipeline (GskVulkanRender       *self,
                                GskVulkanPipelineType  type)
{
  static const struct {
    const char *name;
    guint num_textures;
    GskVulkanPipeline * (* create_func) (GdkVulkanContext *context, VkPipelineLayout layout, const char *name, VkRenderPass render_pass);
  } pipeline_info[GSK_VULKAN_N_PIPELINES] = {
    { "texture",                      1, gsk_vulkan_texture_pipeline_new },
    { "texture-clip",                 1, gsk_vulkan_texture_pipeline_new },
    { "texture-clip-rounded",         1, gsk_vulkan_texture_pipeline_new },
    { "color",                        0, gsk_vulkan_color_pipeline_new },
    { "color-clip",                   0, gsk_vulkan_color_pipeline_new },
    { "color-clip-rounded",           0, gsk_vulkan_color_pipeline_new },
    { "linear",                       0, gsk_vulkan_linear_gradient_pipeline_new },
    { "linear-clip",                  0, gsk_vulkan_linear_gradient_pipeline_new },
    { "linear-clip-rounded",          0, gsk_vulkan_linear_gradient_pipeline_new },
    { "color-matrix",                 1, gsk_vulkan_effect_pipeline_new },
    { "color-matrix-clip",            1, gsk_vulkan_effect_pipeline_new },
    { "color-matrix-clip-rounded",    1, gsk_vulkan_effect_pipeline_new },
    { "border",                       0, gsk_vulkan_border_pipeline_new },
    { "border-clip",                  0, gsk_vulkan_border_pipeline_new },
    { "border-clip-rounded",          0, gsk_vulkan_border_pipeline_new },
    { "inset-shadow",                 0, gsk_vulkan_box_shadow_pipeline_new },
    { "inset-shadow-clip",            0, gsk_vulkan_box_shadow_pipeline_new },
    { "inset-shadow-clip-rounded",    0, gsk_vulkan_box_shadow_pipeline_new },
    { "outset-shadow",                0, gsk_vulkan_box_shadow_pipeline_new },
    { "outset-shadow-clip",           0, gsk_vulkan_box_shadow_pipeline_new },
    { "outset-shadow-clip-rounded",   0, gsk_vulkan_box_shadow_pipeline_new },
    { "blur",                         1, gsk_vulkan_blur_pipeline_new },
    { "blur-clip",                    1, gsk_vulkan_blur_pipeline_new },
    { "blur-clip-rounded",            1, gsk_vulkan_blur_pipeline_new },
    { "mask",                         1, gsk_vulkan_text_pipeline_new },
    { "mask-clip",                    1, gsk_vulkan_text_pipeline_new },
    { "mask-clip-rounded",            1, gsk_vulkan_text_pipeline_new },
    /* color-text pipelines reuse the "texture" shader resources */
    { "texture",                      1, gsk_vulkan_color_text_pipeline_new },
    { "texture-clip",                 1, gsk_vulkan_color_text_pipeline_new },
    { "texture-clip-rounded",         1, gsk_vulkan_color_text_pipeline_new },
    { "crossfade",                    2, gsk_vulkan_cross_fade_pipeline_new },
    { "crossfade-clip",               2, gsk_vulkan_cross_fade_pipeline_new },
    { "crossfade-clip-rounded",       2, gsk_vulkan_cross_fade_pipeline_new },
    { "blendmode",                    2, gsk_vulkan_blend_mode_pipeline_new },
    { "blendmode-clip",               2, gsk_vulkan_blend_mode_pipeline_new },
    { "blendmode-clip-rounded",       2, gsk_vulkan_blend_mode_pipeline_new },
  };

  g_return_val_if_fail (type < GSK_VULKAN_N_PIPELINES, NULL);

  /* pipelines are created lazily and kept for the renderer's lifetime */
  if (self->pipelines[type] == NULL)
    self->pipelines[type] = pipeline_info[type].create_func (self->vulkan,
                                                             self->pipeline_layout[pipeline_info[type].num_textures],
                                                             pipeline_info[type].name,
                                                             self->render_pass);

  return self->pipelines[type];
}
433
434 VkDescriptorSet
gsk_vulkan_render_get_descriptor_set(GskVulkanRender * self,gsize id)435 gsk_vulkan_render_get_descriptor_set (GskVulkanRender *self,
436 gsize id)
437 {
438 g_assert (id < self->n_descriptor_sets);
439
440 return self->descriptor_sets[id];
441 }
442
/* Entry of GskVulkanRender::descriptor_set_indexes: maps an
 * (image, repeat) pair to its descriptor set index. The entry is
 * both key and value of the hash table. */
typedef struct {
  gsize index;             /* index into GskVulkanRender::descriptor_sets */
  GskVulkanImage *image;   /* image to sample from (unowned) */
  gboolean repeat;         /* TRUE to sample with the repeating sampler */
} HashDescriptorSetIndexEntry;
448
449 static guint
desc_set_index_hash(gconstpointer v)450 desc_set_index_hash (gconstpointer v)
451 {
452 const HashDescriptorSetIndexEntry *e = v;
453
454 return GPOINTER_TO_UINT (e->image) + e->repeat;
455 }
456
457 static gboolean
desc_set_index_equal(gconstpointer v1,gconstpointer v2)458 desc_set_index_equal (gconstpointer v1, gconstpointer v2)
459 {
460 const HashDescriptorSetIndexEntry *e1 = v1;
461 const HashDescriptorSetIndexEntry *e2 = v2;
462
463 return e1->image == e2->image && e1->repeat == e2->repeat;
464 }
465
466 gsize
gsk_vulkan_render_reserve_descriptor_set(GskVulkanRender * self,GskVulkanImage * source,gboolean repeat)467 gsk_vulkan_render_reserve_descriptor_set (GskVulkanRender *self,
468 GskVulkanImage *source,
469 gboolean repeat)
470 {
471 HashDescriptorSetIndexEntry lookup;
472 HashDescriptorSetIndexEntry *entry;
473
474 g_assert (source != NULL);
475
476 lookup.image = source;
477 lookup.repeat = repeat;
478
479 entry = g_hash_table_lookup (self->descriptor_set_indexes, &lookup);
480 if (entry)
481 return entry->index;
482
483 entry = g_new (HashDescriptorSetIndexEntry, 1);
484 entry->image = source;
485 entry->repeat = repeat;
486 entry->index = g_hash_table_size (self->descriptor_set_indexes);
487 g_hash_table_add (self->descriptor_set_indexes, entry);
488
489 return entry->index;
490 }
491
492 static void
gsk_vulkan_render_prepare_descriptor_sets(GskVulkanRender * self)493 gsk_vulkan_render_prepare_descriptor_sets (GskVulkanRender *self)
494 {
495 GHashTableIter iter;
496 gpointer key;
497 VkDevice device;
498 GList *l;
499 guint i, needed_sets;
500
501 device = gdk_vulkan_context_get_device (self->vulkan);
502
503 for (l = self->render_passes; l; l = l->next)
504 {
505 GskVulkanRenderPass *pass = l->data;
506 gsk_vulkan_render_pass_reserve_descriptor_sets (pass, self);
507 }
508
509 needed_sets = g_hash_table_size (self->descriptor_set_indexes);
510 if (needed_sets > self->n_descriptor_sets)
511 {
512 if (needed_sets > self->descriptor_pool_maxsets)
513 {
514 guint added_sets = needed_sets - self->descriptor_pool_maxsets;
515 added_sets = added_sets + DESCRIPTOR_POOL_MAXSETS_INCREASE - 1;
516 added_sets -= added_sets % DESCRIPTOR_POOL_MAXSETS_INCREASE;
517
518 vkDestroyDescriptorPool (device,
519 self->descriptor_pool,
520 NULL);
521 self->descriptor_pool_maxsets += added_sets;
522 GSK_VK_CHECK (vkCreateDescriptorPool, device,
523 &(VkDescriptorPoolCreateInfo) {
524 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
525 .maxSets = self->descriptor_pool_maxsets,
526 .poolSizeCount = 1,
527 .pPoolSizes = (VkDescriptorPoolSize[1]) {
528 {
529 .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
530 .descriptorCount = self->descriptor_pool_maxsets
531 }
532 }
533 },
534 NULL,
535 &self->descriptor_pool);
536 }
537 else
538 {
539 GSK_VK_CHECK (vkResetDescriptorPool, device,
540 self->descriptor_pool,
541 0);
542 }
543
544 self->n_descriptor_sets = needed_sets;
545 self->descriptor_sets = g_renew (VkDescriptorSet, self->descriptor_sets, needed_sets);
546 }
547
548 VkDescriptorSetLayout *layouts = g_newa (VkDescriptorSetLayout, needed_sets);
549 for (i = 0; i < needed_sets; i++)
550 layouts[i] = self->descriptor_set_layout;
551
552 GSK_VK_CHECK (vkAllocateDescriptorSets, device,
553 &(VkDescriptorSetAllocateInfo) {
554 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
555 .descriptorPool = self->descriptor_pool,
556 .descriptorSetCount = needed_sets,
557 .pSetLayouts = layouts
558 },
559 self->descriptor_sets);
560
561 g_hash_table_iter_init (&iter, self->descriptor_set_indexes);
562 while (g_hash_table_iter_next (&iter, &key, NULL))
563 {
564 HashDescriptorSetIndexEntry *entry = key;
565 GskVulkanImage *image = entry->image;
566 gsize id = entry->index;
567 gboolean repeat = entry->repeat;
568
569 vkUpdateDescriptorSets (device,
570 1,
571 (VkWriteDescriptorSet[1]) {
572 {
573 .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
574 .dstSet = self->descriptor_sets[id],
575 .dstBinding = 0,
576 .dstArrayElement = 0,
577 .descriptorCount = 1,
578 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
579 .pImageInfo = &(VkDescriptorImageInfo) {
580 .sampler = repeat ? self->repeating_sampler : self->sampler,
581 .imageView = gsk_vulkan_image_get_image_view (image),
582 .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
583 }
584 }
585 },
586 0, NULL);
587 }
588 }
589
/* Records and submits one command buffer per queued render pass.
 *
 * With the SYNC debug flag set, additionally waits on the fence and
 * records the GPU time in the profiler.
 */
void
gsk_vulkan_render_draw (GskVulkanRender *self)
{
  GList *l;

#ifdef G_ENABLE_DEBUG
  if (GSK_RENDERER_DEBUG_CHECK (self->renderer, SYNC))
    gsk_profiler_timer_begin (gsk_renderer_get_profiler (self->renderer), self->gpu_time_timer);
#endif

  gsk_vulkan_render_prepare_descriptor_sets (self);

  for (l = self->render_passes; l; l = l->next)
    {
      GskVulkanRenderPass *pass = l->data;
      VkCommandBuffer command_buffer;
      gsize wait_semaphore_count;
      gsize signal_semaphore_count;
      VkSemaphore *wait_semaphores;
      VkSemaphore *signal_semaphores;

      wait_semaphore_count = gsk_vulkan_render_pass_get_wait_semaphores (pass, &wait_semaphores);
      signal_semaphore_count = gsk_vulkan_render_pass_get_signal_semaphores (pass, &signal_semaphores);

      command_buffer = gsk_vulkan_command_pool_get_buffer (self->command_pool);

      /* 3 == number of pipeline layouts (0..2 textures) */
      gsk_vulkan_render_pass_draw (pass, self, 3, self->pipeline_layout, command_buffer);

      /* only the final submission signals the frame fence;
       * earlier ones pass VK_NULL_HANDLE */
      gsk_vulkan_command_pool_submit_buffer (self->command_pool,
                                             command_buffer,
                                             wait_semaphore_count,
                                             wait_semaphores,
                                             signal_semaphore_count,
                                             signal_semaphores,
                                             l->next != NULL ? VK_NULL_HANDLE : self->fence);
    }

#ifdef G_ENABLE_DEBUG
  if (GSK_RENDERER_DEBUG_CHECK (self->renderer, SYNC))
    {
      GskProfiler *profiler;
      gint64 gpu_time;

      /* block until the GPU finished so the timer measures actual GPU work */
      GSK_VK_CHECK (vkWaitForFences, gdk_vulkan_context_get_device (self->vulkan),
                    1,
                    &self->fence,
                    VK_TRUE,
                    INT64_MAX);

      profiler = gsk_renderer_get_profiler (self->renderer);
      gpu_time = gsk_profiler_timer_end (profiler, self->gpu_time_timer);
      gsk_profiler_timer_set (profiler, self->gpu_time_timer, gpu_time);
    }
#endif
}
645
646 GdkTexture *
gsk_vulkan_render_download_target(GskVulkanRender * self)647 gsk_vulkan_render_download_target (GskVulkanRender *self)
648 {
649 gsk_vulkan_uploader_reset (self->uploader);
650
651 return gsk_vulkan_image_download (self->target, self->uploader);
652 }
653
/* Releases all per-frame state after waiting for the GPU to finish:
 * resets fence, uploader, command pool and descriptor pool, frees the
 * render passes and cleanup images, and drops clip and target.
 */
static void
gsk_vulkan_render_cleanup (GskVulkanRender *self)
{
  VkDevice device = gdk_vulkan_context_get_device (self->vulkan);

  /* XXX: Wait for fence here or just in reset()? */
  GSK_VK_CHECK (vkWaitForFences, device,
                1,
                &self->fence,
                VK_TRUE,
                INT64_MAX);

  GSK_VK_CHECK (vkResetFences, device,
                1,
                &self->fence);

  gsk_vulkan_uploader_reset (self->uploader);

  gsk_vulkan_command_pool_reset (self->command_pool);

  /* entries are freed by the hash table's g_free destroy func */
  g_hash_table_remove_all (self->descriptor_set_indexes);
  GSK_VK_CHECK (vkResetDescriptorPool, device,
                self->descriptor_pool,
                0);

  g_list_free_full (self->render_passes, (GDestroyNotify) gsk_vulkan_render_pass_free);
  self->render_passes = NULL;
  g_slist_free_full (self->cleanup_images, g_object_unref);
  self->cleanup_images = NULL;

  g_clear_pointer (&self->clip, cairo_region_destroy);
  g_clear_object (&self->target);
}
687
/* Frees @self and all Vulkan objects it owns.
 * Waits for pending GPU work via gsk_vulkan_render_cleanup() first.
 */
void
gsk_vulkan_render_free (GskVulkanRender *self)
{
  GHashTableIter iter;
  gpointer key, value;
  VkDevice device;
  guint i;

  gsk_vulkan_render_cleanup (self);

  device = gdk_vulkan_context_get_device (self->vulkan);

  g_hash_table_iter_init (&iter, self->framebuffers);
  while (g_hash_table_iter_next (&iter, &key, &value))
    {
      HashFramebufferEntry *fb = value;

      vkDestroyFramebuffer (gdk_vulkan_context_get_device (self->vulkan),
                            fb->framebuffer,
                            NULL);
      g_slice_free (HashFramebufferEntry, fb);
      /* drop the weak ref so the image cannot call back into freed memory */
      g_object_weak_unref (G_OBJECT (key), gsk_vulkan_render_remove_framebuffer_from_image, self);
      g_hash_table_iter_remove (&iter);
    }
  g_hash_table_unref (self->framebuffers);

  for (i = 0; i < GSK_VULKAN_N_PIPELINES; i++)
    g_clear_object (&self->pipelines[i]);

  g_clear_pointer (&self->uploader, gsk_vulkan_uploader_free);

  for (i = 0; i < 3; i++)
    vkDestroyPipelineLayout (device,
                             self->pipeline_layout[i],
                             NULL);

  vkDestroyRenderPass (device,
                       self->render_pass,
                       NULL);

  /* destroying the pool frees all sets allocated from it */
  vkDestroyDescriptorPool (device,
                           self->descriptor_pool,
                           NULL);
  g_free (self->descriptor_sets);
  g_hash_table_unref (self->descriptor_set_indexes);

  vkDestroyDescriptorSetLayout (device,
                                self->descriptor_set_layout,
                                NULL);

  vkDestroyFence (device,
                  self->fence,
                  NULL);

  vkDestroySampler (device,
                    self->sampler,
                    NULL);

  vkDestroySampler (device,
                    self->repeating_sampler,
                    NULL);

  gsk_vulkan_command_pool_free (self->command_pool);

  g_slice_free (GskVulkanRender, self);
}
754
755 gboolean
gsk_vulkan_render_is_busy(GskVulkanRender * self)756 gsk_vulkan_render_is_busy (GskVulkanRender *self)
757 {
758 return vkGetFenceStatus (gdk_vulkan_context_get_device (self->vulkan), self->fence) != VK_SUCCESS;
759 }
760
761 void
gsk_vulkan_render_reset(GskVulkanRender * self,GskVulkanImage * target,const graphene_rect_t * rect,const cairo_region_t * clip)762 gsk_vulkan_render_reset (GskVulkanRender *self,
763 GskVulkanImage *target,
764 const graphene_rect_t *rect,
765 const cairo_region_t *clip)
766 {
767 gsk_vulkan_render_cleanup (self);
768
769 gsk_vulkan_render_setup (self, target, rect, clip);
770 }
771
772 GskRenderer *
gsk_vulkan_render_get_renderer(GskVulkanRender * self)773 gsk_vulkan_render_get_renderer (GskVulkanRender *self)
774 {
775 return self->renderer;
776 }
777