/** @file

    Memory arena for allocations

    @section license License

    Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements.  See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership.  The ASF licenses this file
    to you under the Apache License, Version 2.0 (the
    "License"); you may not use this file except in compliance
    with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
 */

#pragma once

#include <new>
#include <mutex>
#include <memory>
#include <utility>
#include "tscpp/util/MemSpan.h"
#include "tscore/Scalar.h"
#include "tscore/IntrusivePtr.h"
#include "tscore/ink_assert.h"

/// Apache Traffic Server commons.
namespace ts
{
/** A memory arena.

    The intended use is for allocating many small chunks of memory - a few large allocations are
    better handled through other mechanisms. The purpose is to amortize the cost of allocating each
    chunk across larger internal allocations ("reserving memory"). In addition, the allocated memory
    chunks are presumed to have similar lifetimes, so all of the memory in the arena can be released
    when the arena is destroyed.
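
    An illustrative sketch of the intended use (assumes a @c Widget type defined elsewhere):
    @code
    ts::MemArena arena;
    char *text = static_cast<char *>(arena.alloc(128).data()); // Raw storage from the arena.
    Widget *w  = arena.make<Widget>();                         // Construct an object in the arena.
    // ... use text and w; no per-chunk cleanup is needed ...
    // All memory is released when the arena is destroyed.
    @endcode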
 */
class MemArena
{
  using self_type = MemArena; ///< Self reference type.
protected:
  struct Block; // Forward declare
  using BlockPtr = ts::IntrusivePtr<Block>;
  friend struct IntrusivePtrPolicy<Block>;
  /** Simple internal arena block of memory. Maintains the underlying memory.
   *
   * An intrusive pointer is used so that all of the memory is kept in this single block. This
   * struct is just the header on the full memory block, allowing the raw memory and the metadata
   * to be obtained in a single memory allocation.
   */
  struct Block : public ts::IntrusivePtrCounter {
    size_t size;         ///< Actual block size.
    size_t allocated{0}; ///< Current allocated (in use) bytes.
    BlockPtr next;       ///< Next block in the list of previously allocated blocks.

    /** Construct to have @a n bytes of available storage.
     *
     * Note this is descriptive - this presumes use via placement new, and the size value describes
     * memory already allocated immediately after this instance.
     * @param n The amount of storage.
     */
    Block(size_t n);

    /// Get the start of the data in this block.
    char *data();

    /// Get the start of the data in this block.
    const char *data() const;

    /// Amount of unallocated storage.
    size_t remaining() const;

    /// Span of unallocated storage.
    MemSpan<void> remnant();

    /** Allocate @a n bytes from this block.
     *
     * @param n Number of bytes to allocate.
     * @return The span of memory allocated.
     */
    MemSpan<void> alloc(size_t n);

    /** Check if the byte at address @a ptr is in this block.
     *
     * @param ptr Address of byte to check.
     * @return @c true if @a ptr is in this block, @c false otherwise.
     */
    bool contains(const void *ptr) const;

    /** Override standard delete.
     *
     * This is required because the allocated memory is larger than the class instance, which
     * requires the memory to be released differently.
     *
     * @param ptr Memory to be de-allocated.
     */
    static void operator delete(void *ptr);
  };

public:
  /** Construct with a reservation hint.
   *
   * No memory is initially reserved; when memory is first needed, at least @a n bytes of
   * available memory will be reserved.
   *
   * To pre-reserve, call @c alloc(0), e.g.
   * @code
   * MemArena arena(512); // Make sure at least 512 bytes available in first block.
   * arena.alloc(0); // Force allocation of first block.
   * @endcode
   *
   * @param n Minimum number of available bytes in the first internally reserved block.
   */
  explicit MemArena(size_t n = DEFAULT_BLOCK_SIZE);

  /** Allocate @a n bytes of storage.

      Returns a span of memory within the arena. @c alloc is self-expanding but does @b not
      self-coalesce - if the current internal block cannot hold @a n bytes, a new block is
      reserved. This means that no matter the arena size, the caller will always be able to
      @c alloc @a n bytes.

      @param n Number of bytes to allocate.
      @return A @c MemSpan of the allocated memory.
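
      For instance (a sketch - exact block boundaries depend on internal rounding):
      @code
      ts::MemArena arena(1024);
      auto a = arena.alloc(512);  // Served from the first internal block.
      auto b = arena.alloc(8192); // Does not fit in the remnant - a new block is reserved.
      // Both spans are valid, but they need not be contiguous.
      @endcode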
   */
  MemSpan<void> alloc(size_t n);

  /** Allocate and initialize a block of memory.

      The template type specifies the type to create and any arguments are forwarded to the constructor. Example:
      @code
      struct Thing { ... };
      Thing* thing = arena.make<Thing>(...constructor args...);
      @endcode

      Do @b not call @c delete on an object created this way - that would attempt to free the
      memory and break. A destructor may be invoked explicitly, but the point of this class is that
      no object in it needs to be deleted; the memory will all be reclaimed when the arena is
      destroyed. In general it is a bad idea to make objects in the arena that own memory that is
      not also in the arena.
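
      If a destructor does need to run, invoke it explicitly, e.g.
      @code
      thing->~Thing(); // Explicit destructor call - the memory itself stays in the arena.
      @endcode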
  */
  template <typename T, typename... Args> T *make(Args &&... args);

  /** Freeze reserved memory.

      All internal memory blocks are frozen and will not be involved in future allocations.
      Subsequent allocation will reserve new internal blocks. By default the first newly reserved
      block will be large enough to contain all frozen memory. If that is not the correct size, a
      different target can be specified as @a n.

      @param n Target number of available bytes in the next reserved internal block.
      @return @c *this
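
      A common pattern is to freeze, copy the live data into the new generation, then thaw to
      release the old memory. A sketch, assuming @c len bytes of live data:
      @code
      ts::MemSpan<void> old_span = arena.alloc(len);
      // ... fill and use old_span ...
      arena.freeze();                                // Retire the current blocks.
      ts::MemSpan<void> new_span = arena.alloc(len); // Allocates in a fresh block.
      memcpy(new_span.data(), old_span.data(), len); // Copy the live data forward.
      arena.thaw();                                  // Release the frozen memory.
      @endcode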
   */
  self_type &freeze(size_t n = 0);

  /** Unfreeze arena.
   *
   * Frozen memory is released.
   *
   * @return @c *this
   */
  self_type &thaw();

  /** Release all memory.

      Empties the entire arena and deallocates all underlying memory. The hint for the next
      reserved block size will be @a n if @a n is not zero; otherwise it will be the sum of all
      allocations at the time this method was called.

      @return @c *this
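
      E.g. (a sketch):
      @code
      arena.clear();   // Drop all blocks; the arena stays usable.
      arena.alloc(40); // Reserves a fresh block, sized by the hint, then allocates.
      @endcode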
   */
  self_type &clear(size_t n = 0);

  /// @return The bytes allocated in the current generation.
  size_t size() const;

  /// @return The remaining space in the current internal block.
  size_t remaining() const;

  /// @return The span of remaining contiguous space in the active generation.
  MemSpan<void> remnant() const;

  /// @return The total number of bytes allocated within the arena.
  size_t allocated_size() const;

  /** Check if the byte at @a ptr is in memory owned by this arena.
   *
   * @param ptr Address of byte to check.
   * @return @c true if the byte at @a ptr is in the arena, @c false if not.
   */
  bool contains(const void *ptr) const;

  /** Total memory footprint, including wasted space.
   * @return Total memory footprint.
   */
  size_t reserved_size() const;

protected:
  /** Internally allocate a new block of memory of size @a n bytes.
   *
   * @param n Size of the block to allocate.
   * @return A pointer to the new block.
   */
  BlockPtr make_block(size_t n);

  using Page      = ts::Scalar<4096>; ///< Size for rounding block sizes.
  using Paragraph = ts::Scalar<16>;   ///< Minimum unit of memory allocation.

  static constexpr size_t ALLOC_HEADER_SIZE = 16; ///< Guess of overhead of @c malloc
  /// Initial block size to allocate if not specified via API.
  static constexpr size_t DEFAULT_BLOCK_SIZE = Page::SCALE - Paragraph{round_up(ALLOC_HEADER_SIZE + sizeof(Block))};
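  // Sizing note: the default is chosen so that the presumed allocator overhead, the Block header
  // (rounded up to a paragraph), and the block data together fill exactly one 4096 byte page.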

  size_t _active_allocated = 0; ///< Total allocations in the active generation.
  size_t _active_reserved  = 0; ///< Total current reserved memory.
  /// Total allocations in the previous generation. This is only non-zero while the arena is frozen.
  size_t _prev_allocated = 0;
  /// Total frozen reserved memory.
  size_t _prev_reserved = 0;

  /// Minimum free space needed in the next allocated block.
  /// Set from the constructor size hint and updated by @c freeze and @c clear.
  size_t _reserve_hint = 0;

  BlockPtr _prev;   ///< Previous generation, frozen memory.
  BlockPtr _active; ///< Current generation. Allocate here.
};

// Implementation

inline MemArena::Block::Block(size_t n) : size(n) {}

inline char *
MemArena::Block::data()
{
  return reinterpret_cast<char *>(this + 1);
}

inline const char *
MemArena::Block::data() const
{
  return reinterpret_cast<const char *>(this + 1);
}

inline bool
MemArena::Block::contains(const void *ptr) const
{
  const char *base = this->data();
  return base <= ptr && ptr < base + size;
}

inline size_t
MemArena::Block::remaining() const
{
  return size - allocated;
}

inline MemSpan<void>
MemArena::Block::alloc(size_t n)
{
  ink_assert(n <= this->remaining());
  MemSpan<void> zret = this->remnant().prefix(n);
  allocated += n;
  return zret;
}

template <typename T, typename... Args>
T *
MemArena::make(Args &&... args)
{
  return new (this->alloc(sizeof(T)).data()) T(std::forward<Args>(args)...);
}

inline MemArena::MemArena(size_t n) : _reserve_hint(n) {}

inline MemSpan<void>
MemArena::Block::remnant()
{
  return {this->data() + allocated, this->remaining()};
}

inline size_t
MemArena::size() const
{
  return _active_allocated;
}

inline size_t
MemArena::allocated_size() const
{
  return _prev_allocated + _active_allocated;
}

inline size_t
MemArena::remaining() const
{
  return _active ? _active->remaining() : 0;
}

inline MemSpan<void>
MemArena::remnant() const
{
  return _active ? _active->remnant() : MemSpan<void>{};
}

inline size_t
MemArena::reserved_size() const
{
  return _active_reserved + _prev_reserved;
}

} // namespace ts