/*
 * Copyright © 2016 Dave Airlie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>

#include "nir/nir_builder.h"
#include "radv_meta.h"
#include "radv_private.h"
#include "sid.h"
#include "vk_format.h"

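/* Linear -> sRGB encoding of the resolved color, applied in the shader for
 * sRGB destinations: channels below the threshold 0x3b4d2e1c (the float bit
 * pattern of ~0.0031308) are scaled by 12.92, larger channels use
 * 1.055 * pow(x, 1/2.4) - 0.055, and alpha is passed through unchanged.
 */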
static nir_ssa_def *
radv_meta_build_resolve_srgb_conversion(nir_builder *b, nir_ssa_def *input)
{
   unsigned i;

   nir_ssa_def *cmp[3];
   for (i = 0; i < 3; i++)
      cmp[i] = nir_flt(b, nir_channel(b, input, i), nir_imm_int(b, 0x3b4d2e1c));

   nir_ssa_def *ltvals[3];
   for (i = 0; i < 3; i++)
      ltvals[i] = nir_fmul(b, nir_channel(b, input, i), nir_imm_float(b, 12.92));

   nir_ssa_def *gtvals[3];

   for (i = 0; i < 3; i++) {
      gtvals[i] = nir_fpow(b, nir_channel(b, input, i), nir_imm_float(b, 1.0 / 2.4));
      gtvals[i] = nir_fmul(b, gtvals[i], nir_imm_float(b, 1.055));
      gtvals[i] = nir_fsub(b, gtvals[i], nir_imm_float(b, 0.055));
   }

   nir_ssa_def *comp[4];
   for (i = 0; i < 3; i++)
      comp[i] = nir_bcsel(b, cmp[i], ltvals[i], gtvals[i]);
   comp[3] = nir_channels(b, input, 1 << 3);
   return nir_vec(b, comp, 4);
}

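/* Color resolve compute shader. Each 8x8 workgroup invocation handles one
 * pixel; the source/destination offsets come from the 16-byte push-constant
 * block (src.xy at byte 0, dst.xy at byte 8). As an illustrative sketch only
 * (the shader is built directly in NIR, and resolve() stands in for whatever
 * radv_meta_build_resolve_shader_core() emits):
 *
 *    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);
 *    vec4 color = resolve(s_tex, pos + src_offset);
 *    if (is_srgb)
 *       color = linear_to_srgb(color);
 *    imageStore(out_img, pos + dst_offset, color);
 */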
static nir_shader *
build_resolve_compute_shader(struct radv_device *dev, bool is_integer, bool is_srgb, int samples)
{
   const struct glsl_type *sampler_type =
      glsl_sampler_type(GLSL_SAMPLER_DIM_MS, false, false, GLSL_TYPE_FLOAT);
   const struct glsl_type *img_type = glsl_image_type(GLSL_SAMPLER_DIM_2D, false, GLSL_TYPE_FLOAT);
   nir_builder b = radv_meta_init_shader(MESA_SHADER_COMPUTE, "meta_resolve_cs-%d-%s", samples,
                                         is_integer ? "int" : (is_srgb ? "srgb" : "float"));
   b.shader->info.workgroup_size[0] = 8;
   b.shader->info.workgroup_size[1] = 8;

   nir_variable *input_img = nir_variable_create(b.shader, nir_var_uniform, sampler_type, "s_tex");
   input_img->data.descriptor_set = 0;
   input_img->data.binding = 0;

   nir_variable *output_img = nir_variable_create(b.shader, nir_var_image, img_type, "out_img");
   output_img->data.descriptor_set = 0;
   output_img->data.binding = 1;

   nir_ssa_def *global_id = get_global_ids(&b, 2);

   nir_ssa_def *src_offset = nir_load_push_constant(&b, 2, 32, nir_imm_int(&b, 0), .range = 16);
   nir_ssa_def *dst_offset = nir_load_push_constant(&b, 2, 32, nir_imm_int(&b, 8), .range = 16);

   nir_ssa_def *src_coord = nir_iadd(&b, global_id, src_offset);
   nir_ssa_def *dst_coord = nir_iadd(&b, global_id, dst_offset);

   nir_variable *color = nir_local_variable_create(b.impl, glsl_vec4_type(), "color");

   radv_meta_build_resolve_shader_core(&b, is_integer, samples, input_img, color, src_coord);

   nir_ssa_def *outval = nir_load_var(&b, color);
   if (is_srgb)
      outval = radv_meta_build_resolve_srgb_conversion(&b, outval);

   nir_ssa_def *img_coord = nir_vec4(&b, nir_channel(&b, dst_coord, 0),
                                         nir_channel(&b, dst_coord, 1),
                                         nir_ssa_undef(&b, 1, 32),
                                         nir_ssa_undef(&b, 1, 32));

   nir_image_deref_store(&b, &nir_build_deref_var(&b, output_img)->dest.ssa, img_coord,
                         nir_ssa_undef(&b, 1, 32), outval, nir_imm_int(&b, 0),
                         .image_dim = GLSL_SAMPLER_DIM_2D);
   return b.shader;
}

enum {
   DEPTH_RESOLVE,
   STENCIL_RESOLVE,
};

static const char *
get_resolve_mode_str(VkResolveModeFlagBits resolve_mode)
{
   switch (resolve_mode) {
   case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR:
      return "zero";
   case VK_RESOLVE_MODE_AVERAGE_BIT_KHR:
      return "average";
   case VK_RESOLVE_MODE_MIN_BIT_KHR:
      return "min";
   case VK_RESOLVE_MODE_MAX_BIT_KHR:
      return "max";
   default:
      unreachable("invalid resolve mode");
   }
}

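/* Depth/stencil resolve compute shader. Sample 0 is always fetched with
 * txf_ms; for the MIN/MAX/AVERAGE modes the remaining samples are fetched
 * and combined (fmin/fmax/fadd for depth, umin/umax for stencil), and
 * AVERAGE divides by the sample count at the end. Roughly:
 *
 *    value = texelFetch(s_tex, coord, 0);
 *    for (int s = 1; s < samples; s++)
 *       value = combine(value, texelFetch(s_tex, coord, s));
 *    if (mode == AVERAGE)
 *       value /= samples;
 *    imageStore(out_img, coord, value);
 */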
static nir_shader *
build_depth_stencil_resolve_compute_shader(struct radv_device *dev, int samples, int index,
                                           VkResolveModeFlagBits resolve_mode)
{
   const struct glsl_type *sampler_type =
      glsl_sampler_type(GLSL_SAMPLER_DIM_MS, false, true, GLSL_TYPE_FLOAT);
   const struct glsl_type *img_type = glsl_image_type(GLSL_SAMPLER_DIM_2D, true, GLSL_TYPE_FLOAT);

   nir_builder b = radv_meta_init_shader(MESA_SHADER_COMPUTE, "meta_resolve_cs_%s-%s-%d",
                                         index == DEPTH_RESOLVE ? "depth" : "stencil",
                                         get_resolve_mode_str(resolve_mode), samples);
   b.shader->info.workgroup_size[0] = 8;
   b.shader->info.workgroup_size[1] = 8;

   nir_variable *input_img = nir_variable_create(b.shader, nir_var_uniform, sampler_type, "s_tex");
   input_img->data.descriptor_set = 0;
   input_img->data.binding = 0;

   nir_variable *output_img = nir_variable_create(b.shader, nir_var_image, img_type, "out_img");
   output_img->data.descriptor_set = 0;
   output_img->data.binding = 1;

   nir_ssa_def *img_coord = get_global_ids(&b, 3);

   nir_ssa_def *input_img_deref = &nir_build_deref_var(&b, input_img)->dest.ssa;

   nir_alu_type type = index == DEPTH_RESOLVE ? nir_type_float32 : nir_type_uint32;

   nir_tex_instr *tex = nir_tex_instr_create(b.shader, 3);
   tex->sampler_dim = GLSL_SAMPLER_DIM_MS;
   tex->op = nir_texop_txf_ms;
   tex->src[0].src_type = nir_tex_src_coord;
   tex->src[0].src = nir_src_for_ssa(img_coord);
   tex->src[1].src_type = nir_tex_src_ms_index;
   tex->src[1].src = nir_src_for_ssa(nir_imm_int(&b, 0));
   tex->src[2].src_type = nir_tex_src_texture_deref;
   tex->src[2].src = nir_src_for_ssa(input_img_deref);
   tex->dest_type = type;
   tex->is_array = true;
   tex->coord_components = 3;

   nir_ssa_dest_init(&tex->instr, &tex->dest, 4, 32, "tex");
   nir_builder_instr_insert(&b, &tex->instr);

   nir_ssa_def *outval = &tex->dest.ssa;

   if (resolve_mode != VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR) {
      for (int i = 1; i < samples; i++) {
         nir_tex_instr *tex_add = nir_tex_instr_create(b.shader, 3);
         tex_add->sampler_dim = GLSL_SAMPLER_DIM_MS;
         tex_add->op = nir_texop_txf_ms;
         tex_add->src[0].src_type = nir_tex_src_coord;
         tex_add->src[0].src = nir_src_for_ssa(img_coord);
         tex_add->src[1].src_type = nir_tex_src_ms_index;
         tex_add->src[1].src = nir_src_for_ssa(nir_imm_int(&b, i));
         tex_add->src[2].src_type = nir_tex_src_texture_deref;
         tex_add->src[2].src = nir_src_for_ssa(input_img_deref);
         tex_add->dest_type = type;
         tex_add->is_array = true;
         tex_add->coord_components = 3;

         nir_ssa_dest_init(&tex_add->instr, &tex_add->dest, 4, 32, "tex");
         nir_builder_instr_insert(&b, &tex_add->instr);

         switch (resolve_mode) {
         case VK_RESOLVE_MODE_AVERAGE_BIT_KHR:
            assert(index == DEPTH_RESOLVE);
            outval = nir_fadd(&b, outval, &tex_add->dest.ssa);
            break;
         case VK_RESOLVE_MODE_MIN_BIT_KHR:
            if (index == DEPTH_RESOLVE)
               outval = nir_fmin(&b, outval, &tex_add->dest.ssa);
            else
               outval = nir_umin(&b, outval, &tex_add->dest.ssa);
            break;
         case VK_RESOLVE_MODE_MAX_BIT_KHR:
            if (index == DEPTH_RESOLVE)
               outval = nir_fmax(&b, outval, &tex_add->dest.ssa);
            else
               outval = nir_umax(&b, outval, &tex_add->dest.ssa);
            break;
         default:
            unreachable("invalid resolve mode");
         }
      }

      if (resolve_mode == VK_RESOLVE_MODE_AVERAGE_BIT_KHR)
         outval = nir_fdiv(&b, outval, nir_imm_float(&b, samples));
   }

   nir_ssa_def *coord = nir_vec4(&b, nir_channel(&b, img_coord, 0), nir_channel(&b, img_coord, 1),
                                 nir_channel(&b, img_coord, 2), nir_ssa_undef(&b, 1, 32));
   nir_image_deref_store(&b, &nir_build_deref_var(&b, output_img)->dest.ssa, coord,
                         nir_ssa_undef(&b, 1, 32), outval, nir_imm_int(&b, 0),
                         .image_dim = GLSL_SAMPLER_DIM_2D, .image_array = true);
   return b.shader;
}

static VkResult
create_layout(struct radv_device *device)
{
   VkResult result;
   /*
    * Two descriptors: one for the multisampled image being sampled and
    * one for the storage image being written.
    */
   VkDescriptorSetLayoutCreateInfo ds_create_info = {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
      .flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
      .bindingCount = 2,
      .pBindings = (VkDescriptorSetLayoutBinding[]){
         {.binding = 0,
          .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
          .descriptorCount = 1,
          .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
          .pImmutableSamplers = NULL},
         {.binding = 1,
          .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
          .descriptorCount = 1,
          .stageFlags = VK_SHADER_STAGE_COMPUTE_BIT,
          .pImmutableSamplers = NULL},
      }};

   result = radv_CreateDescriptorSetLayout(radv_device_to_handle(device), &ds_create_info,
                                           &device->meta_state.alloc,
                                           &device->meta_state.resolve_compute.ds_layout);
   if (result != VK_SUCCESS)
      goto fail;

   VkPipelineLayoutCreateInfo pl_create_info = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
      .setLayoutCount = 1,
      .pSetLayouts = &device->meta_state.resolve_compute.ds_layout,
      .pushConstantRangeCount = 1,
      .pPushConstantRanges = &(VkPushConstantRange){VK_SHADER_STAGE_COMPUTE_BIT, 0, 16},
   };

   result = radv_CreatePipelineLayout(radv_device_to_handle(device), &pl_create_info,
                                      &device->meta_state.alloc,
                                      &device->meta_state.resolve_compute.p_layout);
   if (result != VK_SUCCESS)
      goto fail;
   return VK_SUCCESS;
fail:
   return result;
}

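/* Pipelines are created lazily: the meta-state mutex is taken, an already
 * created pipeline is reused, otherwise the NIR shader is built, compiled
 * into a compute pipeline through the meta pipeline cache, and the NIR is
 * freed again.
 */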
static VkResult
create_resolve_pipeline(struct radv_device *device, int samples, bool is_integer, bool is_srgb,
                        VkPipeline *pipeline)
{
   VkResult result;

   mtx_lock(&device->meta_state.mtx);
   if (*pipeline) {
      mtx_unlock(&device->meta_state.mtx);
      return VK_SUCCESS;
   }

   nir_shader *cs = build_resolve_compute_shader(device, is_integer, is_srgb, samples);

   /* compute shader */

   VkPipelineShaderStageCreateInfo pipeline_shader_stage = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
      .stage = VK_SHADER_STAGE_COMPUTE_BIT,
      .module = vk_shader_module_handle_from_nir(cs),
      .pName = "main",
      .pSpecializationInfo = NULL,
   };

   VkComputePipelineCreateInfo vk_pipeline_info = {
      .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
      .stage = pipeline_shader_stage,
      .flags = 0,
      .layout = device->meta_state.resolve_compute.p_layout,
   };

   result = radv_CreateComputePipelines(radv_device_to_handle(device),
                                        radv_pipeline_cache_to_handle(&device->meta_state.cache), 1,
                                        &vk_pipeline_info, NULL, pipeline);
   if (result != VK_SUCCESS)
      goto fail;

   ralloc_free(cs);
   mtx_unlock(&device->meta_state.mtx);
   return VK_SUCCESS;
fail:
   ralloc_free(cs);
   mtx_unlock(&device->meta_state.mtx);
   return result;
}

static VkResult
create_depth_stencil_resolve_pipeline(struct radv_device *device, int samples, int index,
                                      VkResolveModeFlagBits resolve_mode, VkPipeline *pipeline)
{
   VkResult result;

   mtx_lock(&device->meta_state.mtx);
   if (*pipeline) {
      mtx_unlock(&device->meta_state.mtx);
      return VK_SUCCESS;
   }

   nir_shader *cs =
      build_depth_stencil_resolve_compute_shader(device, samples, index, resolve_mode);

   /* compute shader */
   VkPipelineShaderStageCreateInfo pipeline_shader_stage = {
      .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
      .stage = VK_SHADER_STAGE_COMPUTE_BIT,
      .module = vk_shader_module_handle_from_nir(cs),
      .pName = "main",
      .pSpecializationInfo = NULL,
   };

   VkComputePipelineCreateInfo vk_pipeline_info = {
      .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
      .stage = pipeline_shader_stage,
      .flags = 0,
      .layout = device->meta_state.resolve_compute.p_layout,
   };

   result = radv_CreateComputePipelines(radv_device_to_handle(device),
                                        radv_pipeline_cache_to_handle(&device->meta_state.cache), 1,
                                        &vk_pipeline_info, NULL, pipeline);
   if (result != VK_SUCCESS)
      goto fail;

   ralloc_free(cs);
   mtx_unlock(&device->meta_state.mtx);
   return VK_SUCCESS;
fail:
   ralloc_free(cs);
   mtx_unlock(&device->meta_state.mtx);
   return result;
}

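/* Unless on-demand creation was requested, all pipeline variants are created
 * up front: float/int/sRGB color resolve plus depth/stencil min/max (and
 * depth average) for every supported sample count (MAX_SAMPLES_LOG2), and a
 * single SAMPLE_ZERO pipeline per aspect, which only ever reads sample 0 and
 * therefore works for any sample count.
 */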
VkResult
radv_device_init_meta_resolve_compute_state(struct radv_device *device, bool on_demand)
{
   struct radv_meta_state *state = &device->meta_state;
   VkResult res;

   res = create_layout(device);
   if (res != VK_SUCCESS)
      goto fail;

   if (on_demand)
      return VK_SUCCESS;

   for (uint32_t i = 0; i < MAX_SAMPLES_LOG2; ++i) {
      uint32_t samples = 1 << i;

      res = create_resolve_pipeline(device, samples, false, false,
                                    &state->resolve_compute.rc[i].pipeline);
      if (res != VK_SUCCESS)
         goto fail;

      res = create_resolve_pipeline(device, samples, true, false,
                                    &state->resolve_compute.rc[i].i_pipeline);
      if (res != VK_SUCCESS)
         goto fail;

      res = create_resolve_pipeline(device, samples, false, true,
                                    &state->resolve_compute.rc[i].srgb_pipeline);
      if (res != VK_SUCCESS)
         goto fail;

      res = create_depth_stencil_resolve_pipeline(
         device, samples, DEPTH_RESOLVE, VK_RESOLVE_MODE_AVERAGE_BIT_KHR,
         &state->resolve_compute.depth[i].average_pipeline);
      if (res != VK_SUCCESS)
         goto fail;

      res = create_depth_stencil_resolve_pipeline(device, samples, DEPTH_RESOLVE,
                                                  VK_RESOLVE_MODE_MAX_BIT_KHR,
                                                  &state->resolve_compute.depth[i].max_pipeline);
      if (res != VK_SUCCESS)
         goto fail;

      res = create_depth_stencil_resolve_pipeline(device, samples, DEPTH_RESOLVE,
                                                  VK_RESOLVE_MODE_MIN_BIT_KHR,
                                                  &state->resolve_compute.depth[i].min_pipeline);
      if (res != VK_SUCCESS)
         goto fail;

      res = create_depth_stencil_resolve_pipeline(device, samples, STENCIL_RESOLVE,
                                                  VK_RESOLVE_MODE_MAX_BIT_KHR,
                                                  &state->resolve_compute.stencil[i].max_pipeline);
      if (res != VK_SUCCESS)
         goto fail;

      res = create_depth_stencil_resolve_pipeline(device, samples, STENCIL_RESOLVE,
                                                  VK_RESOLVE_MODE_MIN_BIT_KHR,
                                                  &state->resolve_compute.stencil[i].min_pipeline);
      if (res != VK_SUCCESS)
         goto fail;
   }

   res = create_depth_stencil_resolve_pipeline(device, 0, DEPTH_RESOLVE,
                                               VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR,
                                               &state->resolve_compute.depth_zero_pipeline);
   if (res != VK_SUCCESS)
      goto fail;

   res = create_depth_stencil_resolve_pipeline(device, 0, STENCIL_RESOLVE,
                                               VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR,
                                               &state->resolve_compute.stencil_zero_pipeline);
   if (res != VK_SUCCESS)
      goto fail;

   return VK_SUCCESS;
fail:
   radv_device_finish_meta_resolve_compute_state(device);
   return res;
}

void
radv_device_finish_meta_resolve_compute_state(struct radv_device *device)
{
   struct radv_meta_state *state = &device->meta_state;
   for (uint32_t i = 0; i < MAX_SAMPLES_LOG2; ++i) {
      radv_DestroyPipeline(radv_device_to_handle(device), state->resolve_compute.rc[i].pipeline,
                           &state->alloc);

      radv_DestroyPipeline(radv_device_to_handle(device), state->resolve_compute.rc[i].i_pipeline,
                           &state->alloc);

      radv_DestroyPipeline(radv_device_to_handle(device),
                           state->resolve_compute.rc[i].srgb_pipeline, &state->alloc);

      radv_DestroyPipeline(radv_device_to_handle(device),
                           state->resolve_compute.depth[i].average_pipeline, &state->alloc);

      radv_DestroyPipeline(radv_device_to_handle(device),
                           state->resolve_compute.depth[i].max_pipeline, &state->alloc);

      radv_DestroyPipeline(radv_device_to_handle(device),
                           state->resolve_compute.depth[i].min_pipeline, &state->alloc);

      radv_DestroyPipeline(radv_device_to_handle(device),
                           state->resolve_compute.stencil[i].max_pipeline, &state->alloc);

      radv_DestroyPipeline(radv_device_to_handle(device),
                           state->resolve_compute.stencil[i].min_pipeline, &state->alloc);
   }

   radv_DestroyPipeline(radv_device_to_handle(device), state->resolve_compute.depth_zero_pipeline,
                        &state->alloc);

   radv_DestroyPipeline(radv_device_to_handle(device), state->resolve_compute.stencil_zero_pipeline,
                        &state->alloc);

   radv_DestroyDescriptorSetLayout(radv_device_to_handle(device), state->resolve_compute.ds_layout,
                                   &state->alloc);
   radv_DestroyPipelineLayout(radv_device_to_handle(device), state->resolve_compute.p_layout,
                              &state->alloc);
}

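/* Pick the color resolve pipeline variant (integer, sRGB or float) that
 * matches the source view format and sample count, creating it on demand if
 * it does not exist yet.
 */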
static VkPipeline *
radv_get_resolve_pipeline(struct radv_cmd_buffer *cmd_buffer, struct radv_image_view *src_iview)
{
   struct radv_device *device = cmd_buffer->device;
   struct radv_meta_state *state = &device->meta_state;
   uint32_t samples = src_iview->image->info.samples;
   uint32_t samples_log2 = ffs(samples) - 1;
   VkPipeline *pipeline;

   if (vk_format_is_int(src_iview->vk_format))
      pipeline = &state->resolve_compute.rc[samples_log2].i_pipeline;
   else if (vk_format_is_srgb(src_iview->vk_format))
      pipeline = &state->resolve_compute.rc[samples_log2].srgb_pipeline;
   else
      pipeline = &state->resolve_compute.rc[samples_log2].pipeline;

   if (!*pipeline) {
      VkResult ret;

      ret = create_resolve_pipeline(device, samples, vk_format_is_int(src_iview->vk_format),
                                    vk_format_is_srgb(src_iview->vk_format), pipeline);
      if (ret != VK_SUCCESS) {
         cmd_buffer->record_result = ret;
         return NULL;
      }
   }

   return pipeline;
}

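/* Record one color resolve dispatch: push the sampled/storage image
 * descriptors, bind the matching pipeline, push the src/dst offsets as the
 * 16-byte push-constant block and dispatch one invocation per pixel of
 * resolve_extent (radv_unaligned_dispatch takes the raw extent, which need
 * not be a multiple of the 8x8 workgroup size).
 */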
static void
emit_resolve(struct radv_cmd_buffer *cmd_buffer, struct radv_image_view *src_iview,
             struct radv_image_view *dest_iview, const VkOffset2D *src_offset,
             const VkOffset2D *dest_offset, const VkExtent2D *resolve_extent)
{
   struct radv_device *device = cmd_buffer->device;
   VkPipeline *pipeline;

   radv_meta_push_descriptor_set(
      cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, device->meta_state.resolve_compute.p_layout,
      0, /* set */
      2, /* descriptorWriteCount */
      (VkWriteDescriptorSet[]){{.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                .dstBinding = 0,
                                .dstArrayElement = 0,
                                .descriptorCount = 1,
                                .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
                                .pImageInfo =
                                   (VkDescriptorImageInfo[]){
                                      {.sampler = VK_NULL_HANDLE,
                                       .imageView = radv_image_view_to_handle(src_iview),
                                       .imageLayout = VK_IMAGE_LAYOUT_GENERAL},
                                   }},
                               {.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                .dstBinding = 1,
                                .dstArrayElement = 0,
                                .descriptorCount = 1,
                                .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
                                .pImageInfo = (VkDescriptorImageInfo[]){
                                   {
                                      .sampler = VK_NULL_HANDLE,
                                      .imageView = radv_image_view_to_handle(dest_iview),
                                      .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
                                   },
                                }}});

   pipeline = radv_get_resolve_pipeline(cmd_buffer, src_iview);

   radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE,
                        *pipeline);

   unsigned push_constants[4] = {
      src_offset->x,
      src_offset->y,
      dest_offset->x,
      dest_offset->y,
   };
   radv_CmdPushConstants(radv_cmd_buffer_to_handle(cmd_buffer),
                         device->meta_state.resolve_compute.p_layout, VK_SHADER_STAGE_COMPUTE_BIT,
                         0, 16, push_constants);
   radv_unaligned_dispatch(cmd_buffer, resolve_extent->width, resolve_extent->height, 1);
}

static void
emit_depth_stencil_resolve(struct radv_cmd_buffer *cmd_buffer, struct radv_image_view *src_iview,
                           struct radv_image_view *dest_iview, const VkExtent3D *resolve_extent,
                           VkImageAspectFlags aspects, VkResolveModeFlagBits resolve_mode)
{
   struct radv_device *device = cmd_buffer->device;
   const uint32_t samples = src_iview->image->info.samples;
   const uint32_t samples_log2 = ffs(samples) - 1;
   VkPipeline *pipeline;

   radv_meta_push_descriptor_set(
      cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, device->meta_state.resolve_compute.p_layout,
      0, /* set */
      2, /* descriptorWriteCount */
      (VkWriteDescriptorSet[]){{.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                .dstBinding = 0,
                                .dstArrayElement = 0,
                                .descriptorCount = 1,
                                .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
                                .pImageInfo =
                                   (VkDescriptorImageInfo[]){
                                      {.sampler = VK_NULL_HANDLE,
                                       .imageView = radv_image_view_to_handle(src_iview),
                                       .imageLayout = VK_IMAGE_LAYOUT_GENERAL},
                                   }},
                               {.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                                .dstBinding = 1,
                                .dstArrayElement = 0,
                                .descriptorCount = 1,
                                .descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
                                .pImageInfo = (VkDescriptorImageInfo[]){
                                   {
                                      .sampler = VK_NULL_HANDLE,
                                      .imageView = radv_image_view_to_handle(dest_iview),
                                      .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
                                   },
                                }}});

   switch (resolve_mode) {
   case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR:
      if (aspects == VK_IMAGE_ASPECT_DEPTH_BIT)
         pipeline = &device->meta_state.resolve_compute.depth_zero_pipeline;
      else
         pipeline = &device->meta_state.resolve_compute.stencil_zero_pipeline;
      break;
   case VK_RESOLVE_MODE_AVERAGE_BIT_KHR:
      assert(aspects == VK_IMAGE_ASPECT_DEPTH_BIT);
      pipeline = &device->meta_state.resolve_compute.depth[samples_log2].average_pipeline;
      break;
   case VK_RESOLVE_MODE_MIN_BIT_KHR:
      if (aspects == VK_IMAGE_ASPECT_DEPTH_BIT)
         pipeline = &device->meta_state.resolve_compute.depth[samples_log2].min_pipeline;
      else
         pipeline = &device->meta_state.resolve_compute.stencil[samples_log2].min_pipeline;
      break;
   case VK_RESOLVE_MODE_MAX_BIT_KHR:
      if (aspects == VK_IMAGE_ASPECT_DEPTH_BIT)
         pipeline = &device->meta_state.resolve_compute.depth[samples_log2].max_pipeline;
      else
         pipeline = &device->meta_state.resolve_compute.stencil[samples_log2].max_pipeline;
      break;
   default:
      unreachable("invalid resolve mode");
   }

   if (!*pipeline) {
      int index = aspects == VK_IMAGE_ASPECT_DEPTH_BIT ? DEPTH_RESOLVE : STENCIL_RESOLVE;
      VkResult ret;

      ret = create_depth_stencil_resolve_pipeline(device, samples, index, resolve_mode, pipeline);
      if (ret != VK_SUCCESS) {
         cmd_buffer->record_result = ret;
         return;
      }
   }

   radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_COMPUTE,
                        *pipeline);

   radv_unaligned_dispatch(cmd_buffer, resolve_extent->width, resolve_extent->height,
                           resolve_extent->depth);
}

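/* Resolve a color image with the compute path: decompress the source first,
 * decompress the destination's DCC when only part of it is written and DCC
 * image stores are not supported, then resolve layer by layer, and finally
 * re-initialize the destination's DCC metadata if it stays compressed in the
 * given layout.
 */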
void
radv_meta_resolve_compute_image(struct radv_cmd_buffer *cmd_buffer, struct radv_image *src_image,
                                VkFormat src_format, VkImageLayout src_image_layout,
                                struct radv_image *dest_image, VkFormat dest_format,
                                VkImageLayout dest_image_layout, const VkImageResolve2KHR *region)
{
   struct radv_meta_saved_state saved_state;

   radv_decompress_resolve_src(cmd_buffer, src_image, src_image_layout, region);

   /* For partial resolves, DCC should be decompressed before resolving
    * because the metadata is re-initialized to the uncompressed state
    * afterwards.
    */
   uint32_t queue_mask = radv_image_queue_family_mask(dest_image, cmd_buffer->qf,
                                                      cmd_buffer->qf);

   if (!radv_image_use_dcc_image_stores(cmd_buffer->device, dest_image) &&
       radv_layout_dcc_compressed(cmd_buffer->device, dest_image, region->dstSubresource.mipLevel,
                                  dest_image_layout, false, queue_mask) &&
       (region->dstOffset.x || region->dstOffset.y || region->dstOffset.z ||
        region->extent.width != dest_image->info.width ||
        region->extent.height != dest_image->info.height ||
        region->extent.depth != dest_image->info.depth)) {
      radv_decompress_dcc(cmd_buffer, dest_image,
                          &(VkImageSubresourceRange){
                             .aspectMask = region->dstSubresource.aspectMask,
                             .baseMipLevel = region->dstSubresource.mipLevel,
                             .levelCount = 1,
                             .baseArrayLayer = region->dstSubresource.baseArrayLayer,
                             .layerCount = region->dstSubresource.layerCount,
                          });
   }

   radv_meta_save(
      &saved_state, cmd_buffer,
      RADV_META_SAVE_COMPUTE_PIPELINE | RADV_META_SAVE_CONSTANTS | RADV_META_SAVE_DESCRIPTORS);

   assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
   assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
   assert(region->srcSubresource.layerCount == region->dstSubresource.layerCount);

   const uint32_t src_base_layer =
      radv_meta_get_iview_layer(src_image, &region->srcSubresource, &region->srcOffset);

   const uint32_t dest_base_layer =
      radv_meta_get_iview_layer(dest_image, &region->dstSubresource, &region->dstOffset);

   const struct VkExtent3D extent = radv_sanitize_image_extent(src_image->type, region->extent);
   const struct VkOffset3D srcOffset =
      radv_sanitize_image_offset(src_image->type, region->srcOffset);
   const struct VkOffset3D dstOffset =
      radv_sanitize_image_offset(dest_image->type, region->dstOffset);

   for (uint32_t layer = 0; layer < region->srcSubresource.layerCount; ++layer) {

      struct radv_image_view src_iview;
      radv_image_view_init(&src_iview, cmd_buffer->device,
                           &(VkImageViewCreateInfo){
                              .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                              .image = radv_image_to_handle(src_image),
                              .viewType = radv_meta_get_view_type(src_image),
                              .format = src_format,
                              .subresourceRange =
                                 {
                                    .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                    .baseMipLevel = region->srcSubresource.mipLevel,
                                    .levelCount = 1,
                                    .baseArrayLayer = src_base_layer + layer,
                                    .layerCount = 1,
                                 },
                           },
                           NULL);

      struct radv_image_view dest_iview;
      radv_image_view_init(&dest_iview, cmd_buffer->device,
                           &(VkImageViewCreateInfo){
                              .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                              .image = radv_image_to_handle(dest_image),
                              .viewType = radv_meta_get_view_type(dest_image),
                              .format = vk_to_non_srgb_format(dest_format),
                              .subresourceRange =
                                 {
                                    .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                                    .baseMipLevel = region->dstSubresource.mipLevel,
                                    .levelCount = 1,
                                    .baseArrayLayer = dest_base_layer + layer,
                                    .layerCount = 1,
                                 },
                           },
                           NULL);

      emit_resolve(cmd_buffer, &src_iview, &dest_iview, &(VkOffset2D){srcOffset.x, srcOffset.y},
                   &(VkOffset2D){dstOffset.x, dstOffset.y},
                   &(VkExtent2D){extent.width, extent.height});

      radv_image_view_finish(&src_iview);
      radv_image_view_finish(&dest_iview);
   }

   radv_meta_restore(&saved_state, cmd_buffer);

   if (!radv_image_use_dcc_image_stores(cmd_buffer->device, dest_image) &&
       radv_layout_dcc_compressed(cmd_buffer->device, dest_image, region->dstSubresource.mipLevel,
                                  dest_image_layout, false, queue_mask)) {

      cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_CS_PARTIAL_FLUSH | RADV_CMD_FLAG_INV_VCACHE;

      VkImageSubresourceRange range = {
         .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
         .baseMipLevel = region->dstSubresource.mipLevel,
         .levelCount = 1,
         .baseArrayLayer = dest_base_layer,
         .layerCount = region->dstSubresource.layerCount,
      };

      cmd_buffer->state.flush_bits |= radv_init_dcc(cmd_buffer, dest_image, &range, 0xffffffff);
   }
}

/**
 * Emit any needed resolves for the current subpass.
 */
void
radv_cmd_buffer_resolve_subpass_cs(struct radv_cmd_buffer *cmd_buffer)
{
   struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
   const struct radv_subpass *subpass = cmd_buffer->state.subpass;
   struct radv_subpass_barrier barrier;
   uint32_t layer_count = fb->layers;

   if (subpass->view_mask)
      layer_count = util_last_bit(subpass->view_mask);

   /* Resolves happen before the end-of-subpass barriers get executed, so
    * we have to make the attachment shader-readable.
    */
   barrier.src_stage_mask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR;
   barrier.src_access_mask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
   barrier.dst_access_mask = VK_ACCESS_2_SHADER_READ_BIT_KHR | VK_ACCESS_2_SHADER_WRITE_BIT_KHR;
   radv_emit_subpass_barrier(cmd_buffer, &barrier);

   for (uint32_t i = 0; i < subpass->color_count; ++i) {
      struct radv_subpass_attachment src_att = subpass->color_attachments[i];
      struct radv_subpass_attachment dst_att = subpass->resolve_attachments[i];

      if (dst_att.attachment == VK_ATTACHMENT_UNUSED)
         continue;

      struct radv_image_view *src_iview = cmd_buffer->state.attachments[src_att.attachment].iview;
      struct radv_image_view *dst_iview = cmd_buffer->state.attachments[dst_att.attachment].iview;

      VkImageResolve2KHR region = {
         .sType = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
         .extent = (VkExtent3D){fb->width, fb->height, 1},
         .srcSubresource =
            (VkImageSubresourceLayers){
               .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
               .mipLevel = src_iview->base_mip,
               .baseArrayLayer = src_iview->base_layer,
               .layerCount = layer_count,
            },
         .dstSubresource =
            (VkImageSubresourceLayers){
               .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
               .mipLevel = dst_iview->base_mip,
               .baseArrayLayer = dst_iview->base_layer,
               .layerCount = layer_count,
            },
         .srcOffset = (VkOffset3D){0, 0, 0},
         .dstOffset = (VkOffset3D){0, 0, 0},
      };

      radv_meta_resolve_compute_image(cmd_buffer, src_iview->image, src_iview->vk_format,
                                      src_att.layout, dst_iview->image, dst_iview->vk_format,
                                      dst_att.layout, &region);
   }

   cmd_buffer->state.flush_bits |=
      RADV_CMD_FLAG_CS_PARTIAL_FLUSH | RADV_CMD_FLAG_INV_VCACHE |
      radv_src_access_flush(cmd_buffer, VK_ACCESS_2_SHADER_WRITE_BIT_KHR, NULL);
}

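/* Compute path for the subpass depth/stencil resolve. The resolve happens
 * before the end-of-subpass barriers, so attachment writes are flushed and
 * made shader-readable first; after the dispatch the destination's HTILE
 * metadata is re-initialized if the destination is expected to remain
 * HTILE-compressed in its current layout.
 */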
void
radv_depth_stencil_resolve_subpass_cs(struct radv_cmd_buffer *cmd_buffer,
                                      VkImageAspectFlags aspects,
                                      VkResolveModeFlagBits resolve_mode)
{
   struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
   const struct radv_subpass *subpass = cmd_buffer->state.subpass;
   struct radv_meta_saved_state saved_state;
   uint32_t layer_count = fb->layers;

   if (subpass->view_mask)
      layer_count = util_last_bit(subpass->view_mask);

   /* Resolves happen before the end-of-subpass barriers get executed, so
    * we have to make the attachment shader-readable.
    */
   cmd_buffer->state.flush_bits |=
      radv_src_access_flush(cmd_buffer, VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR, NULL) |
      radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_SHADER_READ_BIT_KHR, NULL) |
      radv_dst_access_flush(cmd_buffer, VK_ACCESS_2_SHADER_WRITE_BIT_KHR, NULL);

   struct radv_subpass_attachment src_att = *subpass->depth_stencil_attachment;
   struct radv_image_view *src_iview = cmd_buffer->state.attachments[src_att.attachment].iview;
   struct radv_image *src_image = src_iview->image;

   VkImageResolve2KHR region = {0};
   region.sType = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR;
   region.srcSubresource.aspectMask = aspects;
   region.srcSubresource.mipLevel = 0;
   region.srcSubresource.baseArrayLayer = src_iview->base_layer;
   region.srcSubresource.layerCount = layer_count;

   radv_decompress_resolve_src(cmd_buffer, src_image, src_att.layout, &region);

   radv_meta_save(&saved_state, cmd_buffer,
                  RADV_META_SAVE_COMPUTE_PIPELINE | RADV_META_SAVE_DESCRIPTORS);

   struct radv_subpass_attachment dest_att = *subpass->ds_resolve_attachment;
   struct radv_image_view *dst_iview = cmd_buffer->state.attachments[dest_att.attachment].iview;
   struct radv_image *dst_image = dst_iview->image;

   struct radv_image_view tsrc_iview;
   radv_image_view_init(&tsrc_iview, cmd_buffer->device,
                        &(VkImageViewCreateInfo){
                           .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                           .image = radv_image_to_handle(src_image),
                           .viewType = radv_meta_get_view_type(src_image),
                           .format = src_iview->vk_format,
                           .subresourceRange =
                              {
                                 .aspectMask = aspects,
                                 .baseMipLevel = src_iview->base_mip,
                                 .levelCount = 1,
                                 .baseArrayLayer = src_iview->base_layer,
                                 .layerCount = layer_count,
                              },
                        },
                        NULL);

   struct radv_image_view tdst_iview;
   radv_image_view_init(&tdst_iview, cmd_buffer->device,
                        &(VkImageViewCreateInfo){
                           .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
                           .image = radv_image_to_handle(dst_image),
                           .viewType = radv_meta_get_view_type(dst_image),
                           .format = dst_iview->vk_format,
                           .subresourceRange =
                              {
                                 .aspectMask = aspects,
                                 .baseMipLevel = dst_iview->base_mip,
                                 .levelCount = 1,
                                 .baseArrayLayer = dst_iview->base_layer,
                                 .layerCount = layer_count,
                              },
                        },
                        NULL);

   emit_depth_stencil_resolve(cmd_buffer, &tsrc_iview, &tdst_iview,
                              &(VkExtent3D){fb->width, fb->height, layer_count}, aspects,
                              resolve_mode);

   cmd_buffer->state.flush_bits |=
      RADV_CMD_FLAG_CS_PARTIAL_FLUSH | RADV_CMD_FLAG_INV_VCACHE |
      radv_src_access_flush(cmd_buffer, VK_ACCESS_2_SHADER_WRITE_BIT_KHR, NULL);

   VkImageLayout layout = cmd_buffer->state.attachments[dest_att.attachment].current_layout;
   uint32_t queue_mask = radv_image_queue_family_mask(dst_image, cmd_buffer->qf,
                                                      cmd_buffer->qf);

   if (radv_layout_is_htile_compressed(cmd_buffer->device, dst_image, layout, false, queue_mask)) {
      VkImageSubresourceRange range = {0};
      range.aspectMask = aspects;
      range.baseMipLevel = dst_iview->base_mip;
      range.levelCount = 1;
      range.baseArrayLayer = dst_iview->base_layer;
      range.layerCount = layer_count;

      uint32_t htile_value = radv_get_htile_initial_value(cmd_buffer->device, dst_image);

      cmd_buffer->state.flush_bits |= radv_clear_htile(cmd_buffer, dst_image, &range, htile_value);
   }

   radv_image_view_finish(&tsrc_iview);
   radv_image_view_finish(&tdst_iview);

   radv_meta_restore(&saved_state, cmd_buffer);
}