/* Copyright (c) 2017-2020 Hans-Kristian Arntzen
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#pragma once

#include "vulkan_headers.hpp"
#include "intrusive.hpp"
#include <vector>
#include <algorithm>

namespace Vulkan
{
class Device;
class Buffer;

// A sub-allocation handed out by BufferBlock::allocate().
struct BufferBlockAllocation
{
	uint8_t *host;            // Host-visible pointer to write into.
	VkDeviceSize offset;      // Offset of the allocation within the block.
	VkDeviceSize padded_size; // Size of the range, padded up to the spill region.
};

struct BufferBlock
{
	~BufferBlock();
	Util::IntrusivePtr<Buffer> gpu;
	Util::IntrusivePtr<Buffer> cpu;
	VkDeviceSize offset = 0;
	VkDeviceSize alignment = 0;
	VkDeviceSize size = 0;
	// Minimum size reported for each allocation; see BufferPool::set_spill_region_size().
	VkDeviceSize spill_size = 0;
	uint8_t *mapped = nullptr;

	// Sub-allocates allocate_size bytes at the block's alignment.
	// Returns a null host pointer when the block cannot fit the request.
	BufferBlockAllocation allocate(VkDeviceSize allocate_size)
	{
		auto aligned_offset = (offset + alignment - 1) & ~(alignment - 1);
		if (aligned_offset + allocate_size <= size)
		{
			auto *ret = mapped + aligned_offset;
			offset = aligned_offset + allocate_size;

			// Pad the reported size up to spill_size (for fixed-size UBO ranges),
			// but never past the end of the block.
			VkDeviceSize padded_size = std::max(allocate_size, spill_size);
			padded_size = std::min(padded_size, size - aligned_offset);

			return { ret, aligned_offset, padded_size };
		}
		else
			return { nullptr, 0, 0 };
	}
};

// Hands out host-mapped BufferBlocks of block_size bytes and recycles them for reuse.
class BufferPool
{
public:
	~BufferPool();
	void init(Device *device, VkDeviceSize block_size, VkDeviceSize alignment, VkBufferUsageFlags usage, bool need_device_local);
	void reset();

	// Used for allocating UBOs, where we want to specify a fixed size for range,
	// and we need to make sure we don't allocate beyond the block.
	void set_spill_region_size(VkDeviceSize spill_size);

	VkDeviceSize get_block_size() const
	{
		return block_size;
	}

	BufferBlock request_block(VkDeviceSize minimum_size);
	void recycle_block(BufferBlock &&block);

private:
	Device *device = nullptr;
	VkDeviceSize block_size = 0;
	VkDeviceSize alignment = 0;
	VkDeviceSize spill_size = 0;
	VkBufferUsageFlags usage = 0;
	std::vector<BufferBlock> blocks;
	BufferBlock allocate_block(VkDeviceSize size);
	bool need_device_local = false;
};
}
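
/*
 * Usage sketch (illustrative only, wrapped in #if 0 so it is never compiled):
 * one way a caller might drive the API declared above. The Device reference,
 * the block/alignment/spill sizes, and copying UBO data through alloc.host are
 * assumptions made for this example; the actual allocation and recycling
 * policy lives in buffer_pool.cpp and is not shown here.
 */
#if 0
static void example_upload_ubo(Vulkan::Device &device, const void *data, VkDeviceSize data_size)
{
	Vulkan::BufferPool pool;
	// 256 KiB blocks, 64-byte alignment, used as uniform buffers;
	// no device-local copy requested in this sketch.
	pool.init(&device, 256 * 1024, 64, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, false);
	// Pad every allocation up to a fixed UBO range so a descriptor can always
	// bind spill_size bytes without reaching past the end of the block.
	pool.set_spill_region_size(16 * 1024);

	Vulkan::BufferBlock block = pool.request_block(data_size);
	Vulkan::BufferBlockAllocation alloc = block.allocate(data_size);
	if (alloc.host)
	{
		// <cstring> would be needed for memcpy in a real build.
		memcpy(alloc.host, data, data_size);
		// alloc.offset and alloc.padded_size describe the range to bind.
	}
	else
	{
		// Block exhausted; a caller would request another block here.
	}

	// Hand the block back to the pool; in a real renderer this would normally
	// be deferred until the GPU has consumed the data.
	pool.recycle_block(std::move(block));
}
#endif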