/* Frobby: Software for monomial ideal computations.
   Copyright (C) 2011 University of Aarhus
   Contact Bjarke Hammersholt Roune for license information (www.broune.com)

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see http://www.gnu.org/licenses/.
*/
#ifndef ARENA_GUARD
#define ARENA_GUARD

#include <utility>

#ifdef DEBUG
#include <stack>
#endif

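// ASSERT, IF_DEBUG, MemoryAlignment and the unqualified std names used
// below are not defined in this header; they are assumed to be provided
// by the project-wide stdinc.h that Frobby includes ahead of its other
// headers.
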
/** This is an arena allocator. Arena allocators are very fast at the
 cost of imposing limitations on how memory can be deallocated.

 Allocation and deallocation must occur in stack order (LIFO). In
 other words, only the most recently allocated buffer that has not
 been deallocated yet can be deallocated. It is also possible to
 deallocate all buffers that were allocated after a given buffer. In
 DEBUG mode stack order is enforced by ASSERTs.

 Arena satisfies allocation requests out of a larger block of
 memory. When a block is exhausted, another block is allocated
 using new. The new block is at least twice the size of the previous
 block. Old blocks are never re-used, though they are deallocated
 once every allocation within them has been freed. So the current
 block is replaced if and only if it becomes exhausted.

 The scheme of geometric block growth is used because it allows a very
 fast implementation with excellent locality of reference. This can
 consume memory beyond what the user of the Arena strictly needs - all
 allocators have memory overhead. Optimal performance on both speed
 and memory consumption can usually be reached by having all code use
 the same Arena object, when that is possible given the stack-order
 limitation on deallocation.

 All methods throw bad_alloc if backing memory allocation using new fails.
*/
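/* A minimal sketch of stack-ordered use, assuming only the interface
   declared below (illustrative, not part of the class):

     Arena arena;
     void* a = arena.alloc(100);
     void* b = arena.alloc(200);
     arena.freeTop(b); // the most recent live allocation must go first
     arena.freeTop(a); // now a is the most recent live allocation
     ASSERT(arena.isEmpty());
*/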
class Arena {
 public:
  Arena();
  ~Arena();

  // ***** Basic void* interface *****

  /** Returns a pointer to a buffer of size bytes. Throws bad_alloc if
   that is not possible. All allocated and not freed buffers have
   unique addresses even when size is zero. */
  void* alloc(size_t size);

  /** Frees the buffer pointed to by ptr. That buffer must be the most
   recently allocated buffer from this Arena that has not yet been
   freed. Double frees are not allowed. ptr must not be null. */
  void freeTop(void* ptr);

  /** Frees the buffer pointed to by ptr and all not yet freed
   allocations that have happened since that buffer was allocated. ptr
   must not be null. */
  void freeAndAllAfter(void* ptr);


  // ***** Array interface *****

  /** As alloc(elementCount * sizeof(T)). Constructors for the
   elements of the array are not called. */
  template<class T>
  pair<T*, T*> allocArrayNoCon(size_t elementCount);

  /** As allocArrayNoCon except that constructors for the elements of
   the array are called. The constructors are called in increasing
   order of index. Constructed objects are destructed in reverse
   order if a constructor throws an exception. */
  template<class T>
  pair<T*, T*> allocArray(size_t elementCount);

  /** As freeTop(array) except that the elements of the array in the
   range [array, arrayEnd) are destructed in decreasing order of
   index. The destructors must not throw exceptions.

   array and arrayEnd must not be null. */
  template<class T>
  void freeTopArray(T* array, T* arrayEnd);

  /** As freeTopArray(p.first, p.second). */
  template<class T>
  void freeTopArray(pair<T*, T*> p) {freeTopArray(p.first, p.second);}

  /** As freeAndAllAfter(array) except that the elements of the array
   in the range [array, arrayEnd) are destructed in decreasing
   order of index. The destructors must not throw exceptions. */
  template<class T>
  void freeArrayAndAllAfter(T* array, T* arrayEnd);

  /** As freeArrayAndAllAfter(p.first, p.second). */
  template<class T>
  void freeArrayAndAllAfter(pair<T*, T*> p) {
    freeArrayAndAllAfter(p.first, p.second);
  }

  // ***** Miscellaneous *****

  /** Returns true if there are no live allocations for this Arena. */
  bool isEmpty() const {return !_block.hasPreviousBlock() && _block.isEmpty();}

  /** Returns an arena object that can be used for non-thread safe
   scratch memory after static objects have been initialized. The
   default contract is that each function leaves this arena with the
   exact same objects allocated as before the function was entered. It
   is fine for functions to collaborate, for example by using the arena
   to return variable-size objects without calling new, though care
   should be taken in such cases. */
  static Arena& getArena() {return _scratchArena;}
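
  /* Illustrative pattern for the scratch arena contract above; the
     function doWork is hypothetical:

       void doWork() {
         Arena& arena = Arena::getArena();
         pair<int*, int*> p = arena.allocArrayNoCon<int>(100);
         // ... use [p.first, p.second) as scratch space ...
         arena.freeTopArray(p); // leave the arena exactly as found
       }
  */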

 private:
  /** Allocate a new block with at least needed bytes. */
  void growCapacity(size_t needed);

  /** As Arena::freeTop where ptr was allocated from an old block. */
  void freeTopFromOldBlock(void* ptr);

  /** As Arena::freeAndAllAfter where ptr was allocated from an old
   block. */
  void freeAndAllAfterFromOldBlock(void* ptr);

  /** Free the memory for the previous block. */
  void discardPreviousBlock();

  /** Rounds value up to the nearest multiple of MemoryAlignment. The
   rounded-up value must be representable in a size_t. */
  static size_t alignNoOverflow(size_t value);

  struct Block {
    Block();

    inline bool isInBlock(const void* ptr) const;
    size_t getSize() const {return _blockEnd - _blockBegin;}
    size_t getFreeCapacity() const {return _blockEnd - _freeBegin;}
    bool isEmpty() const {return _blockBegin == _freeBegin;}
    bool isNull() const {return _blockBegin == 0;}
    bool hasPreviousBlock() const {return _previousBlock != 0;}
    IF_DEBUG(bool debugIsValid(const void* ptr) const;)

    char* _blockBegin; /// beginning of current block (aligned)
    char* _freeBegin; /// pointer to first free byte (aligned)
    char* _blockEnd; /// one past last byte (aligned)
    Block* _previousBlock; /// null if none
  } _block;

  static Arena _scratchArena;

  IF_DEBUG(stack<void*> _debugAllocs;)
};

inline size_t Arena::alignNoOverflow(const size_t value) {
  const size_t decAlign = MemoryAlignment - 1; // compile time constant

  // This works because MemoryAlignment is a power of 2.
  const size_t aligned = (value + decAlign) & (~decAlign);

  ASSERT(aligned % MemoryAlignment == 0); // alignment
  ASSERT(aligned >= value); // no overflow
  ASSERT(aligned - value < MemoryAlignment); // adjustment minimal
  return aligned;
}
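
// For example, if MemoryAlignment is 8 then alignNoOverflow(5) and
// alignNoOverflow(8) both yield 8: (5 + 7) & ~7 == 8 and
// (8 + 7) & ~7 == 8.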

inline void* Arena::alloc(size_t size) {
  // It is OK to check capacity before aligning size as capacity is aligned.
  // This single if checks for three different special circumstances:
  //  * size is 0 (size - 1 will overflow)
  //  * there is not enough capacity (size > capacity)
  //  * aligning size would cause an overflow (capacity is aligned)
  const size_t capacity = _block.getFreeCapacity();
  ASSERT(capacity % MemoryAlignment == 0);
  if (size - 1 >= capacity) {
    ASSERT(size == 0 || size > capacity);
    if (size == 0) {
      size = 1;
      if (capacity > 0)
        goto capacityOK;
    }
    growCapacity(size);
  }
 capacityOK:
  ASSERT(0 < size);
  ASSERT(size <= _block.getFreeCapacity());
  ASSERT(alignNoOverflow(size) <= _block.getFreeCapacity());

  void* ptr = _block._freeBegin;
  _block._freeBegin += alignNoOverflow(size);

  IF_DEBUG(_debugAllocs.push(ptr));
  return ptr;
}
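
/* For instance, two zero-size allocations still get distinct
   addresses, because alloc bumps a zero size up to one byte (sketch):

     Arena arena;
     void* a = arena.alloc(0);
     void* b = arena.alloc(0);
     ASSERT(a != b);
*/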

inline void Arena::freeTop(void* ptr) {
  ASSERT(ptr != 0);
#ifdef DEBUG
  ASSERT(!_debugAllocs.empty());
  ASSERT(_debugAllocs.top() == ptr);
  _debugAllocs.pop();
#endif

  if (!_block.isEmpty()) {
    ASSERT(_block.debugIsValid(ptr));
    _block._freeBegin = static_cast<char*>(ptr);
  } else
    freeTopFromOldBlock(ptr);
}

inline void Arena::freeAndAllAfter(void* ptr) {
  ASSERT(ptr != 0);
#ifdef DEBUG
  while (!_debugAllocs.empty() && ptr != _debugAllocs.top())
    _debugAllocs.pop();
  ASSERT(!_debugAllocs.empty());
  ASSERT(_debugAllocs.top() == ptr);
  _debugAllocs.pop();
#endif

  if (_block.isInBlock(ptr)) {
    ASSERT(_block.debugIsValid(ptr));
    _block._freeBegin = static_cast<char*>(ptr);
  } else
    freeAndAllAfterFromOldBlock(ptr);
}

inline bool Arena::Block::isInBlock(const void* ptr) const {
  const char* p = static_cast<const char*>(ptr);
  const size_t offset = static_cast<size_t>(p - _blockBegin);
  // if _blockBegin > ptr then offset overflows to a large integer
  ASSERT((offset < getSize()) == (_blockBegin <= p && p < _blockEnd));
  return offset < getSize();
}

template<class T>
pair<T*, T*> Arena::allocArrayNoCon(size_t elementCount) {
  if (elementCount > static_cast<size_t>(-1) / sizeof(T))
    throw bad_alloc();
  const size_t size = elementCount * sizeof(T);
  ASSERT(size / sizeof(T) == elementCount);
  char* buffer = static_cast<char*>(alloc(size));
  T* array = reinterpret_cast<T*>(buffer);
  T* arrayEnd = reinterpret_cast<T*>(buffer + size);
  return make_pair(array, arrayEnd);
}
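
// The overflow guard above matters: with a 32-bit size_t and
// sizeof(T) == 8, an elementCount of 2^30 would make
// elementCount * sizeof(T) wrap around to 0, silently allocating
// an empty buffer.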

// Frobby may define new as a debugging macro, which would break the
// placement new below, so the macro is suspended here and restored
// afterwards.
#undef new
template<class T>
pair<T*, T*> Arena::allocArray(size_t elementCount) {
  pair<T*, T*> p = allocArrayNoCon<T>(elementCount);
  T* it = p.first;
  try {
    for (; it != p.second; ++it)
      new (it) T();
  } catch (...) {
    freeTopArray<T>(p.first, it);
    throw;
  }
  return p;
}
#ifdef NEW_MACRO
#define new NEW_MACRO
#endif

template<class T>
void Arena::freeTopArray(T* array, T* arrayEnd) {
  ASSERT(array != 0);
  ASSERT(array <= arrayEnd);

  while (arrayEnd != array) {
    --arrayEnd;
    arrayEnd->~T();
  }
  freeTop(array);
}
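
/* Illustrative pairing with allocArray, for a hypothetical type Foo
   that has a default constructor:

     pair<Foo*, Foo*> p = arena.allocArray<Foo>(10);
     // ... use the ten constructed Foo objects ...
     arena.freeTopArray(p); // runs ~Foo() on each element, last first
*/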

template<class T>
void Arena::freeArrayAndAllAfter(T* array, T* arrayEnd) {
  ASSERT(array != 0);
  ASSERT(array <= arrayEnd);

  while (arrayEnd != array) {
    --arrayEnd;
    arrayEnd->~T();
  }
  freeAndAllAfter(array);
}

#endif