/* Copyright (c) 2017-2020 Hans-Kristian Arntzen
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#ifndef FRAMEWORK_MEMORY_ALLOCATOR_HPP
#define FRAMEWORK_MEMORY_ALLOCATOR_HPP

#include "intrusive.hpp"
#include "object_pool.hpp"
#include "intrusive_list.hpp"
#include "vulkan_headers.hpp"
#include "logging.hpp"
#include "bitops.hpp"
#include <assert.h>
#include <memory>
#include <stddef.h>
#include <stdint.h>
#include <vector>

#ifdef GRANITE_VULKAN_MT
#include <mutex>
#endif

namespace Vulkan
{
class Device;

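// Returns ceil(log2(v)) for non-zero v, assuming leading_zeroes() in
// bitops.hpp reports 32 for a zero input (so v == 1 yields 0).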
static inline uint32_t log2_integer(uint32_t v)
{
	v--;
	return 32 - leading_zeroes(v);
}

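// Size classes for suballocation. Each class is backed by its own
// ClassAllocator configured with a different sub-block size, so small and
// large requests are served from separate mini heaps.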
enum MemoryClass
{
	MEMORY_CLASS_SMALL = 0,
	MEMORY_CLASS_MEDIUM,
	MEMORY_CLASS_LARGE,
	MEMORY_CLASS_HUGE,
	MEMORY_CLASS_COUNT
};

enum AllocationTiling
{
	ALLOCATION_TILING_LINEAR = 0,
	ALLOCATION_TILING_OPTIMAL,
	ALLOCATION_TILING_COUNT
};

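// Access intent passed to DeviceAllocator::map_memory()/unmap_memory() so
// that cache maintenance on non-coherent memory (invalidate for reads,
// flush for writes) is only performed when actually required.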
enum MemoryAccessFlag
{
	MEMORY_ACCESS_WRITE_BIT = 1,
	MEMORY_ACCESS_READ_BIT = 2,
	MEMORY_ACCESS_READ_WRITE_BIT = MEMORY_ACCESS_WRITE_BIT | MEMORY_ACCESS_READ_BIT
};
using MemoryAccessFlags = uint32_t;

struct DeviceAllocation;
class DeviceAllocator;

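// Occupancy bitmap for one mini heap of 32 sub-blocks. free_blocks[0] is the
// raw free mask; more generally, bit b of free_blocks[n] is set iff a run of
// n + 1 free sub-blocks starts at sub-block b. Finding space for an
// N-sub-block allocation is therefore a single bit-scan of free_blocks[N - 1].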
class Block
{
public:
	enum
	{
		NumSubBlocks = 32u,
		AllFree = ~0u
	};

	Block(const Block &) = delete;
	void operator=(const Block &) = delete;

	Block()
	{
		for (auto &v : free_blocks)
			v = AllFree;
		longest_run = 32;
	}

	~Block()
	{
		if (free_blocks[0] != AllFree)
			LOGE("Memory leak in block detected.\n");
	}

	inline bool full() const
	{
		return free_blocks[0] == 0;
	}

	inline bool empty() const
	{
		return free_blocks[0] == AllFree;
	}

	inline uint32_t get_longest_run() const
	{
		return longest_run;
	}

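	// allocate() claims a contiguous run of num_blocks sub-blocks and fills in
	// the resulting offset and mask on *block; free() returns exactly the
	// sub-blocks named by mask. Both are defined in the accompanying .cpp.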
	void allocate(uint32_t num_blocks, DeviceAllocation *block);
	void free(uint32_t mask);

private:
	uint32_t free_blocks[NumSubBlocks];
	uint32_t longest_run = 0;

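	// Rebuilds the run ladder from the raw free mask. Each "f &= f >> 1" step
	// keeps only the bits that begin a run one sub-block longer than in the
	// previous iteration, so the loop both refreshes free_blocks[] and leaves
	// the length of the longest free run in longest_run.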
	inline void update_longest_run()
	{
		uint32_t f = free_blocks[0];
		longest_run = 0;

		while (f)
		{
			free_blocks[longest_run++] = f;
			f &= f >> 1;
		}
	}
};

struct MiniHeap;
class ClassAllocator;
class DeviceAllocator;
class Allocator;
class Device;

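// Handle to a piece of device memory. A DeviceAllocation either owns a whole
// VkDeviceMemory (global, dedicated or imported allocations; alloc is then
// nullptr) or refers to a suballocated range of a mini heap, identified by
// the heap iterator, byte offset/size and sub-block mask.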
struct DeviceAllocation
{
	friend class ClassAllocator;
	friend class Allocator;
	friend class Block;
	friend class DeviceAllocator;
	friend class Device;

public:
	inline VkDeviceMemory get_memory() const
	{
		return base;
	}

	inline bool allocation_is_global() const
	{
		return !alloc && base;
	}

	inline uint32_t get_offset() const
	{
		return offset;
	}

	inline uint32_t get_size() const
	{
		return size;
	}

	inline uint32_t get_mask() const
	{
		return mask;
	}

	inline bool is_host_allocation() const
	{
		return host_base != nullptr;
	}

	void free_immediate();
	void free_immediate(DeviceAllocator &allocator);

	static DeviceAllocation make_imported_allocation(VkDeviceMemory memory, VkDeviceSize size, uint32_t memory_type);

private:
	VkDeviceMemory base = VK_NULL_HANDLE;
	uint8_t *host_base = nullptr;
	ClassAllocator *alloc = nullptr;
	Util::IntrusiveList<MiniHeap>::Iterator heap = {};
	uint32_t offset = 0;
	uint32_t mask = 0;
	uint32_t size = 0;

	uint8_t tiling = 0;
	uint8_t memory_type = 0;
	bool hierarchical = false;

	void free_global(DeviceAllocator &allocator, uint32_t size, uint32_t memory_type);

	inline uint8_t *get_host_memory() const
	{
		return host_base;
	}
};

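// One backing DeviceAllocation plus the Block bitmap used to carve it into
// sub-blocks.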
struct MiniHeap : Util::IntrusiveListEnabled<MiniHeap>
{
	DeviceAllocation allocation;
	Block heap;
};

class Allocator;

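// Suballocator for a single memory class within one Vulkan memory type.
// Mini heaps are binned by the longest free run they can still serve, with
// heap_availability_mask caching which bins are non-empty; completely full
// heaps move to full_heaps. Linear and optimal allocations are tracked
// separately per AllocationTiling. Backing memory for new mini heaps comes
// from the parent class allocator if one is set, otherwise from the global
// DeviceAllocator.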
class ClassAllocator
{
public:
	friend class Allocator;
	~ClassAllocator();

	inline void set_tiling_mask(uint32_t mask)
	{
		tiling_mask = mask;
	}

	inline void set_sub_block_size(uint32_t size)
	{
		sub_block_size_log2 = log2_integer(size);
		sub_block_size = size;
	}

	bool allocate(uint32_t size, AllocationTiling tiling, DeviceAllocation *alloc, bool hierarchical);
	void free(DeviceAllocation *alloc);

private:
	ClassAllocator() = default;
	struct AllocationTilingHeaps
	{
		Util::IntrusiveList<MiniHeap> heaps[Block::NumSubBlocks];
		Util::IntrusiveList<MiniHeap> full_heaps;
		uint32_t heap_availability_mask = 0;
	};
	ClassAllocator *parent = nullptr;
	AllocationTilingHeaps tiling_modes[ALLOCATION_TILING_COUNT];
	Util::ObjectPool<MiniHeap> object_pool;

	uint32_t sub_block_size = 1;
	uint32_t sub_block_size_log2 = 0;
	uint32_t tiling_mask = ~0u;
	uint32_t memory_type = 0;
#ifdef GRANITE_VULKAN_MT
	std::mutex lock;
#endif
	DeviceAllocator *global_allocator = nullptr;

	void set_global_allocator(DeviceAllocator *allocator)
	{
		global_allocator = allocator;
	}

	void set_memory_type(uint32_t type)
	{
		memory_type = type;
	}

	void suballocate(uint32_t num_blocks, uint32_t tiling, uint32_t memory_type, MiniHeap &heap,
	                 DeviceAllocation *alloc);

	inline void set_parent(ClassAllocator *allocator)
	{
		parent = allocator;
	}
};

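// Per-memory-type allocator which fans a request out to the ClassAllocator
// matching its size class; requests too large for any class fall back to a
// direct "global" VkDeviceMemory allocation from the DeviceAllocator.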
class Allocator
{
public:
	Allocator();
	void operator=(const Allocator &) = delete;
	Allocator(const Allocator &) = delete;

	bool allocate(uint32_t size, uint32_t alignment, AllocationTiling tiling, DeviceAllocation *alloc);
	bool allocate_global(uint32_t size, DeviceAllocation *alloc);
	bool allocate_dedicated(uint32_t size, DeviceAllocation *alloc, VkImage image);
	inline ClassAllocator &get_class_allocator(MemoryClass clazz)
	{
		return classes[static_cast<unsigned>(clazz)];
	}

	static void free(DeviceAllocation *alloc)
	{
		alloc->free_immediate();
	}

	void set_memory_type(uint32_t memory_type_)
	{
		memory_type = memory_type_;
		for (auto &sub : classes)
			sub.set_memory_type(memory_type);
	}

	void set_global_allocator(DeviceAllocator *allocator)
	{
		for (auto &sub : classes)
			sub.set_global_allocator(allocator);
		global_allocator = allocator;
	}

private:
	ClassAllocator classes[MEMORY_CLASS_COUNT];
	DeviceAllocator *global_allocator = nullptr;
	uint32_t memory_type = 0;
};

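// Owns the actual VkDeviceMemory: one Allocator per Vulkan memory type, plus
// per-heap lists of freed blocks that are recycled on later allocations and
// released for real in garbage_collect(). Also implements map/unmap with the
// flush/invalidate behavior implied by MemoryAccessFlags and atom_alignment
// (the non-coherent atom size).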
class DeviceAllocator
{
public:
	void init(Device *device);
	void set_supports_dedicated_allocation(bool enable)
	{
		use_dedicated = enable;
	}

	~DeviceAllocator();

	bool allocate(uint32_t size, uint32_t alignment, uint32_t memory_type, AllocationTiling tiling,
	              DeviceAllocation *alloc);
	bool allocate_image_memory(uint32_t size, uint32_t alignment, uint32_t memory_type, AllocationTiling tiling,
	                           DeviceAllocation *alloc, VkImage image, bool force_no_dedicated);

	bool allocate_global(uint32_t size, uint32_t memory_type, DeviceAllocation *alloc);

	void garbage_collect();
	void *map_memory(const DeviceAllocation &alloc, MemoryAccessFlags flags, VkDeviceSize offset, VkDeviceSize length);
	void unmap_memory(const DeviceAllocation &alloc, MemoryAccessFlags flags, VkDeviceSize offset, VkDeviceSize length);

	bool allocate(uint32_t size, uint32_t memory_type, VkDeviceMemory *memory, uint8_t **host_memory, VkImage dedicated_image);
	void free(uint32_t size, uint32_t memory_type, VkDeviceMemory memory, uint8_t *host_memory);
	void free_no_recycle(uint32_t size, uint32_t memory_type, VkDeviceMemory memory, uint8_t *host_memory);

private:
	std::vector<std::unique_ptr<Allocator>> allocators;
	Device *device = nullptr;
	const VolkDeviceTable *table = nullptr;
	VkPhysicalDeviceMemoryProperties mem_props;
	VkDeviceSize atom_alignment = 1;
#ifdef GRANITE_VULKAN_MT
	std::mutex lock;
#endif
	bool use_dedicated = false;

	struct Allocation
	{
		VkDeviceMemory memory;
		uint8_t *host_memory;
		uint32_t size;
		uint32_t type;
	};

	struct Heap
	{
		uint64_t size = 0;
		std::vector<Allocation> blocks;
		void garbage_collect(Device *device);
	};

	std::vector<Heap> heaps;
};
}

#endif