/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

GrPixelConfigToVkFormat(GrPixelConfig config,VkFormat * format)13 bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
14 VkFormat dontCare;
15 if (!format) {
16 format = &dontCare;
17 }
18
19 switch (config) {
20 case kUnknown_GrPixelConfig:
21 return false;
22 case kRGBA_8888_GrPixelConfig:
23 *format = VK_FORMAT_R8G8B8A8_UNORM;
24 return true;
25 case kBGRA_8888_GrPixelConfig:
26 *format = VK_FORMAT_B8G8R8A8_UNORM;
27 return true;
28 case kSRGBA_8888_GrPixelConfig:
29 *format = VK_FORMAT_R8G8B8A8_SRGB;
30 return true;
31 case kSBGRA_8888_GrPixelConfig:
32 *format = VK_FORMAT_B8G8R8A8_SRGB;
33 return true;
34 case kRGB_565_GrPixelConfig:
35 *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
36 return true;
37 case kRGBA_4444_GrPixelConfig:
38 // R4G4B4A4 is not required to be supported so we actually
39 // store the data is if it was B4G4R4A4 and swizzle in shaders
40 *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
41 return true;
42 case kAlpha_8_GrPixelConfig: // fall through
43 case kAlpha_8_as_Red_GrPixelConfig:
44 *format = VK_FORMAT_R8_UNORM;
45 return true;
46 case kAlpha_8_as_Alpha_GrPixelConfig:
47 return false;
48 case kGray_8_GrPixelConfig:
49 case kGray_8_as_Red_GrPixelConfig:
50 *format = VK_FORMAT_R8_UNORM;
51 return true;
52 case kGray_8_as_Lum_GrPixelConfig:
53 return false;
54 case kRGBA_float_GrPixelConfig:
55 *format = VK_FORMAT_R32G32B32A32_SFLOAT;
56 return true;
57 case kRG_float_GrPixelConfig:
58 *format = VK_FORMAT_R32G32_SFLOAT;
59 return true;
60 case kRGBA_half_GrPixelConfig:
61 *format = VK_FORMAT_R16G16B16A16_SFLOAT;
62 return true;
63 case kAlpha_half_GrPixelConfig: // fall through
64 case kAlpha_half_as_Red_GrPixelConfig:
65 *format = VK_FORMAT_R16_SFLOAT;
66 return true;
67 }
68 SK_ABORT("Unexpected config");
69 return false;
70 }
71
GrVkFormatToPixelConfig(VkFormat format)72 GrPixelConfig GrVkFormatToPixelConfig(VkFormat format) {
73 switch (format) {
74 case VK_FORMAT_R8G8B8A8_UNORM:
75 return kRGBA_8888_GrPixelConfig;
76 case VK_FORMAT_B8G8R8A8_UNORM:
77 return kBGRA_8888_GrPixelConfig;
78 case VK_FORMAT_R8G8B8A8_SRGB:
79 return kSRGBA_8888_GrPixelConfig;
80 case VK_FORMAT_B8G8R8A8_SRGB:
81 return kSBGRA_8888_GrPixelConfig;
82 case VK_FORMAT_R5G6B5_UNORM_PACK16:
83 return kRGB_565_GrPixelConfig;
84 break;
85 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
86 // R4G4B4A4 is not required to be supported so we actually
87 // store RGBA_4444 data as B4G4R4A4.
88 return kRGBA_4444_GrPixelConfig;
89 case VK_FORMAT_R8_UNORM:
90 return kAlpha_8_GrPixelConfig;
91 case VK_FORMAT_R32G32B32A32_SFLOAT:
92 return kRGBA_float_GrPixelConfig;
93 case VK_FORMAT_R32G32_SFLOAT:
94 return kRG_float_GrPixelConfig;
95 case VK_FORMAT_R16G16B16A16_SFLOAT:
96 return kRGBA_half_GrPixelConfig;
97 case VK_FORMAT_R16_SFLOAT:
98 return kAlpha_half_GrPixelConfig;
99 default:
100 return kUnknown_GrPixelConfig;
101 }
102 }
103
GrVkFormatPixelConfigPairIsValid(VkFormat format,GrPixelConfig config)104 bool GrVkFormatPixelConfigPairIsValid(VkFormat format, GrPixelConfig config) {
105 switch (format) {
106 case VK_FORMAT_R8G8B8A8_UNORM:
107 return kRGBA_8888_GrPixelConfig == config;
108 case VK_FORMAT_B8G8R8A8_UNORM:
109 return kBGRA_8888_GrPixelConfig == config;
110 case VK_FORMAT_R8G8B8A8_SRGB:
111 return kSRGBA_8888_GrPixelConfig == config;
112 case VK_FORMAT_B8G8R8A8_SRGB:
113 return kSBGRA_8888_GrPixelConfig == config;
114 case VK_FORMAT_R5G6B5_UNORM_PACK16:
115 return kRGB_565_GrPixelConfig == config;
116 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
117 // R4G4B4A4 is not required to be supported so we actually
118 // store RGBA_4444 data as B4G4R4A4.
119 return kRGBA_4444_GrPixelConfig == config;
120 case VK_FORMAT_R8_UNORM:
121 return kAlpha_8_GrPixelConfig == config ||
122 kAlpha_8_as_Red_GrPixelConfig == config ||
123 kGray_8_GrPixelConfig == config ||
124 kGray_8_as_Red_GrPixelConfig == config;
125 case VK_FORMAT_R32G32B32A32_SFLOAT:
126 return kRGBA_float_GrPixelConfig == config;
127 case VK_FORMAT_R32G32_SFLOAT:
128 return kRG_float_GrPixelConfig == config;
129 case VK_FORMAT_R16G16B16A16_SFLOAT:
130 return kRGBA_half_GrPixelConfig == config;
131 case VK_FORMAT_R16_SFLOAT:
132 return kAlpha_half_GrPixelConfig == config ||
133 kAlpha_half_as_Red_GrPixelConfig == config;
134 default:
135 return false;
136 }
137 }
138
GrVkFormatIsSupported(VkFormat format)139 bool GrVkFormatIsSupported(VkFormat format) {
140 switch (format) {
141 case VK_FORMAT_R8G8B8A8_UNORM:
142 case VK_FORMAT_B8G8R8A8_UNORM:
143 case VK_FORMAT_R8G8B8A8_SRGB:
144 case VK_FORMAT_B8G8R8A8_SRGB:
145 case VK_FORMAT_R8G8B8A8_SINT:
146 case VK_FORMAT_R5G6B5_UNORM_PACK16:
147 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
148 case VK_FORMAT_R8_UNORM:
149 case VK_FORMAT_R32G32B32A32_SFLOAT:
150 case VK_FORMAT_R32G32_SFLOAT:
151 case VK_FORMAT_R16G16B16A16_SFLOAT:
152 case VK_FORMAT_R16_SFLOAT:
153 return true;
154 default:
155 return false;
156 }
157 }
158
GrVkFormatIsSRGB(VkFormat format,VkFormat * linearFormat)159 bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
160 VkFormat linearFmt = format;
161 switch (format) {
162 case VK_FORMAT_R8_SRGB:
163 linearFmt = VK_FORMAT_R8_UNORM;
164 break;
165 case VK_FORMAT_R8G8_SRGB:
166 linearFmt = VK_FORMAT_R8G8_UNORM;
167 break;
168 case VK_FORMAT_R8G8B8_SRGB:
169 linearFmt = VK_FORMAT_R8G8B8_UNORM;
170 break;
171 case VK_FORMAT_B8G8R8_SRGB:
172 linearFmt = VK_FORMAT_B8G8R8_UNORM;
173 break;
174 case VK_FORMAT_R8G8B8A8_SRGB:
175 linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
176 break;
177 case VK_FORMAT_B8G8R8A8_SRGB:
178 linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
179 break;
180 case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
181 linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
182 break;
183 case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
184 linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
185 break;
186 case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
187 linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
188 break;
189 case VK_FORMAT_BC2_SRGB_BLOCK:
190 linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
191 break;
192 case VK_FORMAT_BC3_SRGB_BLOCK:
193 linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
194 break;
195 case VK_FORMAT_BC7_SRGB_BLOCK:
196 linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
197 break;
198 case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
199 linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
200 break;
201 case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
202 linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
203 break;
204 case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
205 linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
206 break;
207 case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
208 linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
209 break;
210 case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
211 linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
212 break;
213 case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
214 linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
215 break;
216 case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
217 linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
218 break;
219 case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
220 linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
221 break;
222 case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
223 linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
224 break;
225 case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
226 linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
227 break;
228 case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
229 linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
230 break;
231 case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
232 linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
233 break;
234 case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
235 linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
236 break;
237 case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
238 linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
239 break;
240 case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
241 linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
242 break;
243 case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
244 linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
245 break;
246 case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
247 linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
248 break;
249 default:
250 break;
251 }
252 if (linearFormat) {
253 *linearFormat = linearFmt;
254 }
255 return (linearFmt != format);
256 }
257
GrSampleCountToVkSampleCount(uint32_t samples,VkSampleCountFlagBits * vkSamples)258 bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
259 SkASSERT(samples >= 1);
260 switch (samples) {
261 case 1:
262 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
263 return true;
264 case 2:
265 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
266 return true;
267 case 4:
268 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
269 return true;
270 case 8:
271 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
272 return true;
273 case 16:
274 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
275 return true;
276 case 32:
277 *vkSamples = VK_SAMPLE_COUNT_32_BIT;
278 return true;
279 case 64:
280 *vkSamples = VK_SAMPLE_COUNT_64_BIT;
281 return true;
282 default:
283 return false;
284 }
285 }
286
vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage)287 SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
288 if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
289 return SkSL::Program::kVertex_Kind;
290 }
291 if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
292 return SkSL::Program::kGeometry_Kind;
293 }
294 SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
295 return SkSL::Program::kFragment_Kind;
296 }
297
skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind)298 VkShaderStageFlagBits skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind) {
299 if (SkSL::Program::kVertex_Kind == kind) {
300 return VK_SHADER_STAGE_VERTEX_BIT;
301 }
302 if (SkSL::Program::kGeometry_Kind == kind) {
303 return VK_SHADER_STAGE_GEOMETRY_BIT;
304 }
305 SkASSERT(SkSL::Program::kFragment_Kind == kind);
306 return VK_SHADER_STAGE_FRAGMENT_BIT;
307 }
308
GrCompileVkShaderModule(const GrVkGpu * gpu,const char * shaderString,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo,const SkSL::Program::Settings & settings,SkSL::Program::Inputs * outInputs)309 bool GrCompileVkShaderModule(const GrVkGpu* gpu,
310 const char* shaderString,
311 VkShaderStageFlagBits stage,
312 VkShaderModule* shaderModule,
313 VkPipelineShaderStageCreateInfo* stageInfo,
314 const SkSL::Program::Settings& settings,
315 SkSL::Program::Inputs* outInputs) {
316 std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
317 vk_shader_stage_to_skiasl_kind(stage),
318 SkSL::String(shaderString),
319 settings);
320 if (!program) {
321 SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
322 SkASSERT(false);
323 }
324 *outInputs = program->fInputs;
325 SkSL::String code;
326 if (!gpu->shaderCompiler()->toSPIRV(*program, &code)) {
327 SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
328 return false;
329 }
330
331 VkShaderModuleCreateInfo moduleCreateInfo;
332 memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
333 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
334 moduleCreateInfo.pNext = nullptr;
335 moduleCreateInfo.flags = 0;
336 moduleCreateInfo.codeSize = code.size();
337 moduleCreateInfo.pCode = (const uint32_t*)code.c_str();
338
339 VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
340 &moduleCreateInfo,
341 nullptr,
342 shaderModule));
343 if (err) {
344 return false;
345 }
346
347 memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
348 stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
349 stageInfo->pNext = nullptr;
350 stageInfo->flags = 0;
351 stageInfo->stage = skiasl_kind_to_vk_shader_stage(program->fKind);
352 stageInfo->module = *shaderModule;
353 stageInfo->pName = "main";
354 stageInfo->pSpecializationInfo = nullptr;
355
356 return true;
357 }
358