1 /*
2 * Copyright 2016 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7 
8 #include "src/gpu/vk/GrVkDescriptorSetManager.h"
9 
10 #include "src/gpu/vk/GrVkDescriptorPool.h"
11 #include "src/gpu/vk/GrVkDescriptorSet.h"
12 #include "src/gpu/vk/GrVkGpu.h"
13 #include "src/gpu/vk/GrVkUniformHandler.h"
14 
15 #if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
16 #include <sanitizer/lsan_interface.h>
17 #endif
18 
CreateUniformManager(GrVkGpu * gpu)19 GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateUniformManager(GrVkGpu* gpu) {
20     SkSTArray<1, uint32_t> visibilities;
21     uint32_t stages = kVertex_GrShaderFlag | kFragment_GrShaderFlag;
22     if (gpu->vkCaps().shaderCaps()->geometryShaderSupport()) {
23         stages |= kGeometry_GrShaderFlag;
24     }
25     visibilities.push_back(stages);
26 
27     SkTArray<const GrVkSampler*> samplers;
28     return new GrVkDescriptorSetManager(gpu, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, visibilities,
29                                         samplers);
30 }
31 
CreateSamplerManager(GrVkGpu * gpu,VkDescriptorType type,const GrVkUniformHandler & uniformHandler)32 GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
33         GrVkGpu* gpu, VkDescriptorType type, const GrVkUniformHandler& uniformHandler) {
34     SkSTArray<4, uint32_t> visibilities;
35     SkSTArray<4, const GrVkSampler*> immutableSamplers;
36     SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
37     for (int i = 0 ; i < uniformHandler.numSamplers(); ++i) {
38         visibilities.push_back(uniformHandler.samplerVisibility(i));
39         immutableSamplers.push_back(uniformHandler.immutableSampler(i));
40     }
41     return new GrVkDescriptorSetManager(gpu, type, visibilities, immutableSamplers);
42 }
43 
CreateSamplerManager(GrVkGpu * gpu,VkDescriptorType type,const SkTArray<uint32_t> & visibilities)44 GrVkDescriptorSetManager* GrVkDescriptorSetManager::CreateSamplerManager(
45         GrVkGpu* gpu, VkDescriptorType type, const SkTArray<uint32_t>& visibilities) {
46     SkSTArray<4, const GrVkSampler*> immutableSamplers;
47     SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
48     for (int i = 0 ; i < visibilities.count(); ++i) {
49         immutableSamplers.push_back(nullptr);
50     }
51     return new GrVkDescriptorSetManager(gpu, type, visibilities, immutableSamplers);
52 }
53 
GrVkDescriptorSetManager(GrVkGpu * gpu,VkDescriptorType type,const SkTArray<uint32_t> & visibilities,const SkTArray<const GrVkSampler * > & immutableSamplers)54 GrVkDescriptorSetManager::GrVkDescriptorSetManager(
55         GrVkGpu* gpu, VkDescriptorType type,
56         const SkTArray<uint32_t>& visibilities,
57         const SkTArray<const GrVkSampler*>& immutableSamplers)
58     : fPoolManager(type, gpu, visibilities, immutableSamplers) {
59 #ifdef SK_DEBUG
60     if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
61         SkASSERT(visibilities.count() == immutableSamplers.count());
62     } else {
63         SkASSERT(immutableSamplers.count() == 0);
64     }
65 #endif
66     for (int i = 0; i < visibilities.count(); ++i) {
67         fBindingVisibilities.push_back(visibilities[i]);
68     }
69     for (int i = 0; i < immutableSamplers.count(); ++i) {
70         const GrVkSampler* sampler = immutableSamplers[i];
71         if (sampler) {
72             sampler->ref();
73         }
74         fImmutableSamplers.push_back(sampler);
75     }
76 }
77 
getDescriptorSet(GrVkGpu * gpu,const Handle & handle)78 const GrVkDescriptorSet* GrVkDescriptorSetManager::getDescriptorSet(GrVkGpu* gpu,
79                                                                     const Handle& handle) {
80     const GrVkDescriptorSet* ds = nullptr;
81     int count = fFreeSets.count();
82     if (count > 0) {
83         ds = fFreeSets[count - 1];
84         fFreeSets.removeShuffle(count - 1);
85     } else {
86         VkDescriptorSet vkDS;
87         fPoolManager.getNewDescriptorSet(gpu, &vkDS);
88 
89         ds = new GrVkDescriptorSet(vkDS, fPoolManager.fPool, handle);
90     }
91     SkASSERT(ds);
92     return ds;
93 }
94 
// Takes back a descriptor set for later reuse. The caller's ref transfers to
// fFreeSets; getDescriptorSet hands it back out on a future request.
void GrVkDescriptorSetManager::recycleDescriptorSet(const GrVkDescriptorSet* descSet) {
    SkASSERT(descSet);
    fFreeSets.push_back(descSet);
}
99 
release(GrVkGpu * gpu)100 void GrVkDescriptorSetManager::release(GrVkGpu* gpu) {
101     fPoolManager.freeGPUResources(gpu);
102 
103     for (int i = 0; i < fFreeSets.count(); ++i) {
104         fFreeSets[i]->unref(gpu);
105     }
106     fFreeSets.reset();
107 
108     for (int i = 0; i < fImmutableSamplers.count(); ++i) {
109         if (fImmutableSamplers[i]) {
110             fImmutableSamplers[i]->unref(gpu);
111         }
112     }
113     fImmutableSamplers.reset();
114 }
115 
abandon()116 void GrVkDescriptorSetManager::abandon() {
117     fPoolManager.abandonGPUResources();
118 
119     for (int i = 0; i < fFreeSets.count(); ++i) {
120         fFreeSets[i]->unrefAndAbandon();
121     }
122     fFreeSets.reset();
123 
124     for (int i = 0; i < fImmutableSamplers.count(); ++i) {
125         if (fImmutableSamplers[i]) {
126             fImmutableSamplers[i]->unrefAndAbandon();
127         }
128     }
129     fImmutableSamplers.reset();
130 }
131 
isCompatible(VkDescriptorType type,const GrVkUniformHandler * uniHandler) const132 bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
133                                             const GrVkUniformHandler* uniHandler) const {
134     SkASSERT(uniHandler);
135     if (type != fPoolManager.fDescType) {
136         return false;
137     }
138 
139     SkASSERT(type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
140     if (fBindingVisibilities.count() != uniHandler->numSamplers()) {
141         return false;
142     }
143     for (int i = 0; i < uniHandler->numSamplers(); ++i) {
144         if (uniHandler->samplerVisibility(i) != fBindingVisibilities[i] ||
145             uniHandler->immutableSampler(i) != fImmutableSamplers[i]) {
146             return false;
147         }
148     }
149     return true;
150 }
151 
isCompatible(VkDescriptorType type,const SkTArray<uint32_t> & visibilities) const152 bool GrVkDescriptorSetManager::isCompatible(VkDescriptorType type,
153                                             const SkTArray<uint32_t>& visibilities) const {
154     if (type != fPoolManager.fDescType) {
155         return false;
156     }
157 
158     if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
159         VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
160         if (fBindingVisibilities.count() != visibilities.count()) {
161             return false;
162         }
163         for (int i = 0; i < visibilities.count(); ++i) {
164             if (visibilities[i] != fBindingVisibilities[i] || fImmutableSamplers[i] != nullptr) {
165                 return false;
166             }
167         }
168     }
169     return true;
170 }
171 
172 ////////////////////////////////////////////////////////////////////////////////
173 
visibility_to_vk_stage_flags(uint32_t visibility)174 VkShaderStageFlags visibility_to_vk_stage_flags(uint32_t visibility) {
175     VkShaderStageFlags flags = 0;
176 
177     if (visibility & kVertex_GrShaderFlag) {
178         flags |= VK_SHADER_STAGE_VERTEX_BIT;
179     }
180     if (visibility & kGeometry_GrShaderFlag) {
181         flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
182     }
183     if (visibility & kFragment_GrShaderFlag) {
184         flags |= VK_SHADER_STAGE_FRAGMENT_BIT;
185     }
186     return flags;
187 }
188 
// Builds the VkDescriptorSetLayout for this manager's descriptor type and
// creates the initial descriptor pool. Sampler/texel-buffer layouts get one
// binding per entry in 'visibilities'; uniform-buffer layouts get exactly one
// binding (kUniformDescPerSet == 1).
GrVkDescriptorSetManager::DescriptorPoolManager::DescriptorPoolManager(
        VkDescriptorType type,
        GrVkGpu* gpu,
        const SkTArray<uint32_t>& visibilities,
        const SkTArray<const GrVkSampler*>& immutableSamplers)
    : fDescType(type)
    , fCurrentDescriptorCount(0)
    , fPool(nullptr) {


    if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type ||
        VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER == type) {
        // One layout binding per visibility entry, each holding a single
        // descriptor of 'type'.
        uint32_t numBindings = visibilities.count();
        std::unique_ptr<VkDescriptorSetLayoutBinding[]> dsSamplerBindings(
                new VkDescriptorSetLayoutBinding[numBindings]);
        for (uint32_t i = 0; i < numBindings; ++i) {
            uint32_t visibility = visibilities[i];
            dsSamplerBindings[i].binding = i;
            dsSamplerBindings[i].descriptorType = type;
            dsSamplerBindings[i].descriptorCount = 1;
            dsSamplerBindings[i].stageFlags = visibility_to_vk_stage_flags(visibility);
            if (VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER == type) {
                // Bake the immutable sampler (if any) directly into the layout.
                if (immutableSamplers[i]) {
                    dsSamplerBindings[i].pImmutableSamplers = immutableSamplers[i]->samplerPtr();
                } else {
                    dsSamplerBindings[i].pImmutableSamplers = nullptr;
                }
            }
        }

        VkDescriptorSetLayoutCreateInfo dsSamplerLayoutCreateInfo;
        memset(&dsSamplerLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        dsSamplerLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        dsSamplerLayoutCreateInfo.pNext = nullptr;
        dsSamplerLayoutCreateInfo.flags = 0;
        dsSamplerLayoutCreateInfo.bindingCount = numBindings;
        // Setting to nullptr fixes an error in the param checker validation layer. Even though
        // bindingCount is 0 (which is valid), it still tries to validate pBindings unless it is
        // null.
        dsSamplerLayoutCreateInfo.pBindings = numBindings ? dsSamplerBindings.get() : nullptr;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        GR_VK_CALL_ERRCHECK(gpu->vkInterface(),
                            CreateDescriptorSetLayout(gpu->device(),
                                                      &dsSamplerLayoutCreateInfo,
                                                      nullptr,
                                                      &fDescLayout));
        fDescCountPerSet = visibilities.count();
    } else {
        SkASSERT(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER == type);
        GR_STATIC_ASSERT(1 == kUniformDescPerSet);
        SkASSERT(kUniformDescPerSet == visibilities.count());
        // Create Uniform Buffer Descriptor
        VkDescriptorSetLayoutBinding dsUniBinding;
        memset(&dsUniBinding, 0, sizeof(dsUniBinding));
        dsUniBinding.binding = GrVkUniformHandler::kUniformBinding;
        dsUniBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsUniBinding.descriptorCount = 1;
        dsUniBinding.stageFlags = visibility_to_vk_stage_flags(visibilities[0]);
        dsUniBinding.pImmutableSamplers = nullptr;

        VkDescriptorSetLayoutCreateInfo uniformLayoutCreateInfo;
        memset(&uniformLayoutCreateInfo, 0, sizeof(VkDescriptorSetLayoutCreateInfo));
        uniformLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformLayoutCreateInfo.pNext = nullptr;
        uniformLayoutCreateInfo.flags = 0;
        uniformLayoutCreateInfo.bindingCount = 1;
        uniformLayoutCreateInfo.pBindings = &dsUniBinding;

#if defined(SK_ENABLE_SCOPED_LSAN_SUPPRESSIONS)
        // skia:8713
        __lsan::ScopedDisabler lsanDisabler;
#endif
        GR_VK_CALL_ERRCHECK(gpu->vkInterface(), CreateDescriptorSetLayout(gpu->device(),
                                                                          &uniformLayoutCreateInfo,
                                                                          nullptr,
                                                                          &fDescLayout));
        fDescCountPerSet = kUniformDescPerSet;
    }

    // Start with a pool sized for kStartNumDescriptors; getNewPool grows it
    // geometrically as pools are exhausted.
    SkASSERT(fDescCountPerSet < kStartNumDescriptors);
    fMaxDescriptors = kStartNumDescriptors;
    SkASSERT(fMaxDescriptors > 0);
    this->getNewPool(gpu);
}
277 
getNewPool(GrVkGpu * gpu)278 void GrVkDescriptorSetManager::DescriptorPoolManager::getNewPool(GrVkGpu* gpu) {
279     if (fPool) {
280         fPool->unref(gpu);
281         uint32_t newPoolSize = fMaxDescriptors + ((fMaxDescriptors + 1) >> 1);
282         if (newPoolSize < kMaxDescriptors) {
283             fMaxDescriptors = newPoolSize;
284         } else {
285             fMaxDescriptors = kMaxDescriptors;
286         }
287 
288     }
289     fPool = gpu->resourceProvider().findOrCreateCompatibleDescriptorPool(fDescType,
290                                                                          fMaxDescriptors);
291     SkASSERT(fPool);
292 }
293 
getNewDescriptorSet(GrVkGpu * gpu,VkDescriptorSet * ds)294 void GrVkDescriptorSetManager::DescriptorPoolManager::getNewDescriptorSet(GrVkGpu* gpu,
295                                                                           VkDescriptorSet* ds) {
296     if (!fMaxDescriptors) {
297         return;
298     }
299     fCurrentDescriptorCount += fDescCountPerSet;
300     if (fCurrentDescriptorCount > fMaxDescriptors) {
301         this->getNewPool(gpu);
302         fCurrentDescriptorCount = fDescCountPerSet;
303     }
304 
305     VkDescriptorSetAllocateInfo dsAllocateInfo;
306     memset(&dsAllocateInfo, 0, sizeof(VkDescriptorSetAllocateInfo));
307     dsAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
308     dsAllocateInfo.pNext = nullptr;
309     dsAllocateInfo.descriptorPool = fPool->descPool();
310     dsAllocateInfo.descriptorSetCount = 1;
311     dsAllocateInfo.pSetLayouts = &fDescLayout;
312     GR_VK_CALL_ERRCHECK(gpu->vkInterface(), AllocateDescriptorSets(gpu->device(),
313                                                                    &dsAllocateInfo,
314                                                                    ds));
315 }
316 
freeGPUResources(GrVkGpu * gpu)317 void GrVkDescriptorSetManager::DescriptorPoolManager::freeGPUResources(GrVkGpu* gpu) {
318     if (fDescLayout) {
319         GR_VK_CALL(gpu->vkInterface(), DestroyDescriptorSetLayout(gpu->device(), fDescLayout,
320                                                                   nullptr));
321         fDescLayout = VK_NULL_HANDLE;
322     }
323 
324     if (fPool) {
325         fPool->unref(gpu);
326         fPool = nullptr;
327     }
328 }
329 
abandonGPUResources()330 void GrVkDescriptorSetManager::DescriptorPoolManager::abandonGPUResources() {
331     fDescLayout = VK_NULL_HANDLE;
332     if (fPool) {
333         fPool->unrefAndAbandon();
334         fPool = nullptr;
335     }
336 }
337