/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/vk/GrVkDescriptorSetManager.h"

#include "src/gpu/vk/GrVkDescriptorPool.h"
#include "src/gpu/vk/GrVkDescriptorSet.h"
#include "src/gpu/vk/GrVkGpu.h"
#include "src/gpu/vk/GrVkUniformHandler.h"

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
#include <sanitizer/lsan_interface.h>
#endif

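// Creates a manager for the uniform buffer descriptor set: a single uniform buffer binding
// visible to the vertex and fragment stages (and the geometry stage when supported).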
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
    SkSTArray<1, uint32_t> visibilities;
    uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
    if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
        stages |= kGeometry_GrShaderFlag;
    }
    visibilities.push_back(stages);
    SkTArray<const GrVkSampler*> samplers;
    return Create(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities, samplers);
}

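// Creates a sampler descriptor set manager whose bindings mirror the samplers declared by the
// given uniform handler, including any immutable samplers it holds.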
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
        GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
    SkSTArray<4, uint32_t> visibilities;
    SkSTArray<4, const GrVkSampler*> immutableSamplers;
    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    for (int i = 0; i < uniformHandler.numSamplers(); ++i) {
        visibilities.push_back(uniformHandler.samplerVisibility(i));
        immutableSamplers.push_back(uniformHandler.immutableSampler(i));
    }
    return Create(gpu, type, visibilities, immutableSamplers);
}

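// Creates a sampler descriptor set manager from a bare list of per-binding shader visibilities;
// no bindings use immutable samplers.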
GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
        GrVkGpu* gpu, VkDescriptorType type, const SkTArray<uint32_t>& visibilities) {
    SkSTArray<4, const GrVkSampler*> immutableSamplers;
    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    for (int i = 0; i < visibilities.count(); ++i) {
        immutableSamplers.push_back(nullptr);
    }
    return Create(gpu, type, visibilities, immutableSamplers);
}

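// Translates a GrShaderFlags visibility mask into the equivalent VkShaderStageFlags.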
VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
    VkShaderStageFlags flags = 0;

    if (visibility & kVertex_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_VERTEX_BIT;
    }
    if (visibility & kGeometry_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
    }
    if (visibility & kFragment_GrShaderFlag) {
        flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    return flags;
}

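// Builds the VkDescriptorSetLayout for the given descriptor type: one binding per visibility
// entry for sampler/texel-buffer sets, or a single binding for uniform-buffer sets. On success
// returns the layout and the number of descriptors each set will consume from a pool.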
static bool get_layout_and_desc_count(GrVkGpu* gpu,
                                      VkDescriptorType type,
                                      const SkTArray<uint32_t>& visibilities,
                                      const SkTArray<const GrVkSampler*>& immutableSamplers,
                                      VkDescriptorSetLayout* descSetLayout,
                                      uint32_t* descCountPerSet) {
    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                if (immutableSamplers[i]) {
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting to nullptr fixes an error in the param checker validation layer. Even though
        // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
        // null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result,
                          CreateDescriptorSetLayout(gpu->device(),
                                                    &dsSamplerLayoutCreateInfo,
                                                    nullptr,
                                                    descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = visibilities.count();
    } else {
        SkASSERT(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type);
        static constexpr int kUniformDescPerSet = 1;
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        memset(&dsUniBinding, 0, sizeof(dsUniBinding));
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        memset(&uniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        VkResult result;
        GR_VK_CALL_RESULT(gpu, result, CreateDescriptorSetLayout(gpu->device(),
                                                                 &uniformLayoutCreateInfo,
                                                                 nullptr,
                                                                 descSetLayout));
        if (result != VK_SUCCESS) {
            return false;
        }

        *descCountPerSet = kUniformDescPerSet;
    }
    return true;
}

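// Creates the descriptor set layout for the requested type and, on success, wraps it in a new
// GrVkDescriptorSetManager. Returns nullptr if layout creation fails.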
GrVkDescriptorSetManager* GrVkDescriptorSetManager::Create(
        GrVkGpu* gpu, VkDescriptorType type,
        const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers) {
#ifdef SK_DEBUG
    if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
        SkASSERT(visibilities.count() == immutableSamplers.count());
    } else {
        SkASSERT(immutableSamplers.count() == 0);
    }
#endif

    VkDescriptorSetLayout descSetLayout;
    uint32_t descCountPerSet;
    if (!get_layout_and_desc_count(gpu, type, visibilities, immutableSamplers, &descSetLayout,
                                   &descCountPerSet)) {
        return nullptr;
    }
    return new GrVkDescriptorSetManager(gpu, type, descSetLayout, descCountPerSet, visibilities,
                                        immutableSamplers);
}

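// Copies the per-binding visibilities and immutable samplers into the manager, taking a ref on
// each immutable sampler; the refs are dropped in release().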
GrVkDescriptorSetManager::GrVkDescriptorSetManager(
        GrVkGpu* gpu, VkDescriptorType type, VkDescriptorSetLayout descSetLayout,
        uint32_t descCountPerSet, const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers)
    : fPoolManager(descSetLayout, type, descCountPerSet) {
    for (int i = 0; i < visibilities.count(); ++i) {
        fBindingVisibilities.push_back(visibilities[i]);
    }
    for (int i = 0; i < immutableSamplers.count(); ++i) {
        const GrVkSampler* sampler = immutableSamplers[i];
        if (sampler) {
            sampler->ref();
        }
        fImmutableSamplers.push_back(sampler);
    }
}

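// Hands out a descriptor set, preferring a previously recycled set from the free list and only
// allocating a new one from the pool manager when the free list is empty.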
const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
                                                                    const Handle& handle) {
    const GrVkDescriptorSet* ds = nullptr;
    int count = fFreeSets.count();
    if (count > 0) {
        ds = fFreeSets[count - 1];
        fFreeSets.removeShuffle(count - 1);
    } else {
        VkDescriptorSet vkDS;
        if (!fPoolManager.getNewDescriptorSet(gpu, &vkDS)) {
            return nullptr;
        }

        ds = new GrVkDescriptorSet(gpu, vkDS, fPoolManager.fPool, handle);
    }
    SkASSERT(ds);
    return ds;
}

void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}

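// Releases all GPU resources owned by the manager: the layout and pool held by the pool manager,
// every recycled descriptor set, and the refs taken on immutable samplers.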
void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
    fPoolManager.freeGPUResources(gpu);

    for (int i = 0; i < fFreeSets.count(); ++i) {
        fFreeSets[i]->unref();
    }
    fFreeSets.reset();

    for (int i = 0; i < fImmutableSamplers.count(); ++i) {
        if (fImmutableSamplers[i]) {
            fImmutableSamplers[i]->unref();
        }
    }
    fImmutableSamplers.reset();
}

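// Returns true if this manager's descriptor type, per-binding visibilities, and immutable
// samplers exactly match the samplers declared by the given uniform handler.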
bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const GrVkUniformHandler* uniHandler) const {
    SkASSERT(uniHandler);
    if (type != fPoolManager.fDescType) {
        return false;
    }

    SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
        return false;
    }
    for (int i = 0; i < uniHandler->numSamplers(); ++i) {
        if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
            uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
            return false;
        }
    }
    return true;
}

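// Returns true if this manager matches the given descriptor type and visibility list; for
// sampler and texel-buffer types it also requires that no binding uses an immutable sampler.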
bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
                                            const SkTArray<uint32_t>& visibilities) const {
    if (type != fPoolManager.fDescType) {
        return false;
    }

    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        if (fBindingVisibilities.count() != visibilities.count()) {
            return false;
        }
        for (int i = 0; i < visibilities.count(); ++i) {
            if (visibilities[i] != fBindingVisibilities[i] || fImmutableSamplers[i] != nullptr) {
                return false;
            }
        }
    }
    return true;
}

////////////////////////////////////////////////////////////////////////////////

GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorSetLayout layout,
        VkDescriptorType type,
        uint32_t descCountPerSet)
    : fDescLayout(layout)
    , fDescType(type)
    , fDescCountPerSet(descCountPerSet)
    , fMaxDescriptors(kStartNumDescriptors)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {
}

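// Replaces the current pool with a new one from the resource provider, growing the descriptor
// capacity by roughly 1.5x each time (capped at kMaxDescriptors) before requesting it.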
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
    if (fPool) {
        fPool->unref();
        uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
        if (newPoolSize < kMaxDescriptors) {
            fMaxDescriptors = newPoolSize;
        } else {
            fMaxDescriptors = kMaxDescriptors;
        }
    }
    fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
                                                                         fMaxDescriptors);
    return SkToBool(fPool);
}

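// Allocates a single descriptor set from the current pool, first fetching a new (larger) pool
// when there is no pool yet or the current one cannot hold another set's worth of descriptors.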
bool GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
                                                                          VkDescriptorSet* ds) {
    if (!fMaxDescriptors) {
        return false;
    }
    fCurrentDescriptorCount += fDescCountPerSet;
    if (!fPool || fCurrentDescriptorCount > fMaxDescriptors) {
        if (!this->getNewPool(gpu)) {
            return false;
        }
        fCurrentDescriptorCount = fDescCountPerSet;
    }

    VkDescriptorSetAllocateInfo dsAllocateInfo;
    memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
    dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    dsAllocateInfo.pNext = nullptr;
    dsAllocateInfo.descriptorPool = fPool->descPool();
    dsAllocateInfo.descriptorSetCount = 1;
    dsAllocateInfo.pSetLayouts = &fDescLayout;
    VkResult result;
    GR_VK_CALL_RESULT(gpu, result, AllocateDescriptorSets(gpu->device(),
                                                          &dsAllocateInfo,
                                                          ds));
    return result == VK_SUCCESS;
}

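// Destroys the descriptor set layout and drops this pool manager's reference to the current pool.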
void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
    if (fDescLayout) {
        GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
                                                                  nullptr));
        fDescLayout = VK_NULL_HANDLE;
    }

    if (fPool) {
        fPool->unref();
        fPool = nullptr;
    }
}