/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8 #include "src/gpu/vk/GrVkUtil.h"
9
10 #include "src/gpu/GrContextPriv.h"
11 #include "src/gpu/GrDataUtils.h"
12 #include "src/gpu/vk/GrVkGpu.h"
13 #include "src/sksl/SkSLCompiler.h"
14
15 #ifdef SK_DEBUG
GrVkFormatColorTypePairIsValid(VkFormat format,GrColorType colorType)16 bool GrVkFormatColorTypePairIsValid(VkFormat format, GrColorType colorType) {
17 switch (format) {
18 case VK_FORMAT_R8G8B8A8_UNORM: return GrColorType::kRGBA_8888 == colorType ||
19 GrColorType::kRGB_888x == colorType;
20 case VK_FORMAT_B8G8R8A8_UNORM: return GrColorType::kBGRA_8888 == colorType;
21 case VK_FORMAT_R8G8B8A8_SRGB: return GrColorType::kRGBA_8888_SRGB == colorType;
22 case VK_FORMAT_R8G8B8_UNORM: return GrColorType::kRGB_888x == colorType;
23 case VK_FORMAT_R8G8_UNORM: return GrColorType::kRG_88 == colorType;
24 case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return GrColorType::kRGBA_1010102 == colorType;
25 case VK_FORMAT_R5G6B5_UNORM_PACK16: return GrColorType::kBGR_565 == colorType;
26 // R4G4B4A4 is not required to be supported so we actually
27 // store RGBA_4444 data as B4G4R4A4.
28 case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return GrColorType::kABGR_4444 == colorType;
29 case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return GrColorType::kABGR_4444 == colorType;
30 case VK_FORMAT_R8_UNORM: return GrColorType::kAlpha_8 == colorType ||
31 GrColorType::kGray_8 == colorType;
32 case VK_FORMAT_R16G16B16A16_SFLOAT: return GrColorType::kRGBA_F16 == colorType ||
33 GrColorType::kRGBA_F16_Clamped == colorType;
34 case VK_FORMAT_R16_SFLOAT: return GrColorType::kAlpha_F16 == colorType;
35 case VK_FORMAT_R16_UNORM: return GrColorType::kAlpha_16 == colorType;
36 case VK_FORMAT_R16G16_UNORM: return GrColorType::kRG_1616 == colorType;
37 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM: return GrColorType::kRGB_888x == colorType;
38 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: return GrColorType::kRGB_888x == colorType;
39 case VK_FORMAT_R16G16B16A16_UNORM: return GrColorType::kRGBA_16161616 == colorType;
40 case VK_FORMAT_R16G16_SFLOAT: return GrColorType::kRG_F16 == colorType;
41 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: return GrColorType::kRGB_888x == colorType;
42 default: return false;
43 }
44
45 SkUNREACHABLE;
46 }
47 #endif
48
GrVkFormatIsSupported(VkFormat format)49 bool GrVkFormatIsSupported(VkFormat format) {
50 switch (format) {
51 case VK_FORMAT_R8G8B8A8_UNORM:
52 case VK_FORMAT_B8G8R8A8_UNORM:
53 case VK_FORMAT_R8G8B8A8_SRGB:
54 case VK_FORMAT_R8G8B8_UNORM:
55 case VK_FORMAT_R8G8_UNORM:
56 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
57 case VK_FORMAT_R5G6B5_UNORM_PACK16:
58 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
59 case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
60 case VK_FORMAT_R8_UNORM:
61 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
62 case VK_FORMAT_R16G16B16A16_SFLOAT:
63 case VK_FORMAT_R16_SFLOAT:
64 case VK_FORMAT_R16_UNORM:
65 case VK_FORMAT_R16G16_UNORM:
66 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
67 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
68 case VK_FORMAT_R16G16B16A16_UNORM:
69 case VK_FORMAT_R16G16_SFLOAT:
70 return true;
71 default:
72 return false;
73 }
74 }
75
GrVkFormatNeedsYcbcrSampler(VkFormat format)76 bool GrVkFormatNeedsYcbcrSampler(VkFormat format) {
77 return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
78 format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
79 }
80
GrSampleCountToVkSampleCount(uint32_t samples,VkSampleCountFlagBits * vkSamples)81 bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
82 SkASSERT(samples >= 1);
83 switch (samples) {
84 case 1:
85 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
86 return true;
87 case 2:
88 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
89 return true;
90 case 4:
91 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
92 return true;
93 case 8:
94 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
95 return true;
96 case 16:
97 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
98 return true;
99 case 32:
100 *vkSamples = VK_SAMPLE_COUNT_32_BIT;
101 return true;
102 case 64:
103 *vkSamples = VK_SAMPLE_COUNT_64_BIT;
104 return true;
105 default:
106 return false;
107 }
108 }
109
vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage)110 SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
111 if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
112 return SkSL::Program::kVertex_Kind;
113 }
114 if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
115 return SkSL::Program::kGeometry_Kind;
116 }
117 SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
118 return SkSL::Program::kFragment_Kind;
119 }
120
GrCompileVkShaderModule(const GrVkGpu * gpu,const SkSL::String & shaderString,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo,const SkSL::Program::Settings & settings,SkSL::String * outSPIRV,SkSL::Program::Inputs * outInputs)121 bool GrCompileVkShaderModule(const GrVkGpu* gpu,
122 const SkSL::String& shaderString,
123 VkShaderStageFlagBits stage,
124 VkShaderModule* shaderModule,
125 VkPipelineShaderStageCreateInfo* stageInfo,
126 const SkSL::Program::Settings& settings,
127 SkSL::String* outSPIRV,
128 SkSL::Program::Inputs* outInputs) {
129 auto errorHandler = gpu->getContext()->priv().getShaderErrorHandler();
130 std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
131 vk_shader_stage_to_skiasl_kind(stage), shaderString, settings);
132 if (!program) {
133 errorHandler->compileError(shaderString.c_str(),
134 gpu->shaderCompiler()->errorText().c_str());
135 return false;
136 }
137 *outInputs = program->fInputs;
138 if (!gpu->shaderCompiler()->toSPIRV(*program, outSPIRV)) {
139 errorHandler->compileError(shaderString.c_str(),
140 gpu->shaderCompiler()->errorText().c_str());
141 return false;
142 }
143
144 return GrInstallVkShaderModule(gpu, *outSPIRV, stage, shaderModule, stageInfo);
145 }
146
GrInstallVkShaderModule(const GrVkGpu * gpu,const SkSL::String & spirv,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo)147 bool GrInstallVkShaderModule(const GrVkGpu* gpu,
148 const SkSL::String& spirv,
149 VkShaderStageFlagBits stage,
150 VkShaderModule* shaderModule,
151 VkPipelineShaderStageCreateInfo* stageInfo) {
152 VkShaderModuleCreateInfo moduleCreateInfo;
153 memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
154 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
155 moduleCreateInfo.pNext = nullptr;
156 moduleCreateInfo.flags = 0;
157 moduleCreateInfo.codeSize = spirv.size();
158 moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();
159
160 VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
161 &moduleCreateInfo,
162 nullptr,
163 shaderModule));
164 if (err) {
165 return false;
166 }
167
168 memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
169 stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
170 stageInfo->pNext = nullptr;
171 stageInfo->flags = 0;
172 stageInfo->stage = stage;
173 stageInfo->module = *shaderModule;
174 stageInfo->pName = "main";
175 stageInfo->pSpecializationInfo = nullptr;
176
177 return true;
178 }
179
GrVkFormatIsCompressed(VkFormat vkFormat)180 bool GrVkFormatIsCompressed(VkFormat vkFormat) {
181 switch (vkFormat) {
182 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
183 return true;
184 default:
185 return false;
186 }
187 }
188
GrVkFormatToCompressionType(VkFormat vkFormat,SkImage::CompressionType * compressionType)189 bool GrVkFormatToCompressionType(VkFormat vkFormat, SkImage::CompressionType* compressionType) {
190 switch (vkFormat) {
191 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
192 *compressionType = SkImage::kETC1_CompressionType;
193 return true;
194 default:
195 return false;
196 }
197 }
198