/* Functions to support a pool of allocatable objects
   Copyright (C) 1997-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@cgsoftware.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef ALLOC_POOL_H
#define ALLOC_POOL_H

#include "memory-block.h"
#include "options.h"	    // for flag_checking

extern void dump_alloc_pool_statistics (void);

/* Flag indicating that the memory report has already been dumped and
   memory statistics are no longer being gathered.  */
extern bool after_memory_report;

typedef unsigned long ALLOC_POOL_ID_TYPE;

/* Last used ID.  */
extern ALLOC_POOL_ID_TYPE last_id;

/* Pool allocator memory usage.  */
struct pool_usage: public mem_usage
{
  /* Default constructor.  */
  pool_usage (): m_element_size (0), m_pool_name ("") {}
  /* Constructor.  */
  pool_usage (size_t allocated, size_t times, size_t peak,
	      size_t instances, size_t element_size,
	      const char *pool_name)
    : mem_usage (allocated, times, peak, instances),
      m_element_size (element_size),
      m_pool_name (pool_name) {}

  /* Sum the usage with SECOND usage.  */
  pool_usage
  operator+ (const pool_usage &second)
  {
    return pool_usage (m_allocated + second.m_allocated,
			     m_times + second.m_times,
			     m_peak + second.m_peak,
			     m_instances + second.m_instances,
			     m_element_size, m_pool_name);
  }

  /* Dump usage coupled to LOC location, where TOTAL is sum of all rows.  */
  inline void
  dump (mem_location *loc, mem_usage &total) const
  {
    char *location_string = loc->to_string ();

    fprintf (stderr, "%-32s%-48s %6li%10li:%5.1f%%%10li%10li:%5.1f%%%12li\n",
	     m_pool_name, location_string, (long)m_instances,
	     (long)m_allocated, get_percent (m_allocated, total.m_allocated),
	     (long)m_peak, (long)m_times,
	     get_percent (m_times, total.m_times),
	     (long)m_element_size);

    free (location_string);
  }

  /* Dump header with NAME.  */
  static inline void
  dump_header (const char *name)
  {
    fprintf (stderr, "%-32s%-48s %6s%11s%16s%17s%12s\n", "Pool name", name,
	     "Pools", "Leak", "Peak", "Times", "Elt size");
    print_dash_line ();
  }

  /* Dump footer.  */
  inline void
  dump_footer ()
  {
    print_dash_line ();
    fprintf (stderr, "%s%82li%10li\n", "Total", (long)m_instances,
	     (long)m_allocated);
    print_dash_line ();
  }

  /* Element size.  */
  size_t m_element_size;
  /* Pool name.  */
  const char *m_pool_name;
};

extern mem_alloc_description<pool_usage> pool_allocator_usage;

#if 0
/* If a pool with a custom block size is needed, one might use the following
   template.  An instance of this template can be used as a parameter for
   instantiating the base_pool_allocator template:

	typedef custom_block_allocator <128*1024> huge_block_allocator;
	...
	static base_pool_allocator <huge_block_allocator>
						value_pool ("value", 16384);

   Right now it's not used anywhere in the code, and is given here as an
   example.  */

template <size_t BlockSize>
class custom_block_allocator
{
public:
  static const size_t block_size = BlockSize;

  static inline void *
  allocate () ATTRIBUTE_MALLOC
  {
    return XNEWVEC (char, BlockSize);
  }

  static inline void
  release (void *block)
  {
    XDELETEVEC (block);
  }
};
#endif

/* Generic pool allocator.  */

template <typename TBlockAllocator>
class base_pool_allocator
{
public:
  /* Default constructor for pool allocator called NAME.  */
  base_pool_allocator (const char *name, size_t size CXX_MEM_STAT_INFO);
  ~base_pool_allocator ();
  void release ();
  void release_if_empty ();
  void *allocate () ATTRIBUTE_MALLOC;
  void remove (void *object);
  size_t num_elts_current ();

private:
  struct allocation_pool_list
  {
    allocation_pool_list *next;
  };

  /* Initialize a pool allocator.  */
  void initialize ();

  struct allocation_object
  {
#if CHECKING_P
    /* The ID of the alloc pool which the object was allocated from.  */
    ALLOC_POOL_ID_TYPE id;
#endif

    union
      {
	/* The data of the object.  */
	char data[1];

	/* Because we want any type of data to be well aligned after the ID,
	   the following elements are here.  They are never accessed so
	   the allocated object may be even smaller than this structure.
	   We do not care about alignment for floating-point types.  */
	char *align_p;
	int64_t align_i;
      } u;

#if CHECKING_P
    static inline allocation_object*
    get_instance (void *data_ptr)
    {
      return (allocation_object *)(((char *)(data_ptr))
				      - offsetof (allocation_object,
						  u.data));
    }
#endif

    static inline void*
    get_data (void *instance_ptr)
    {
      return (void*)(((allocation_object *) instance_ptr)->u.data);
    }
  };
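
  /* Illustrative note (not part of the original header): on an LP64 host
     with CHECKING_P, the 8-byte ID sits at the start of each
     allocation_object and u.data begins at offset 8, so for a pointer P
     handed out by the pool, get_instance (P) steps back over that header
     and get_data (get_instance (P)) == P.  Without CHECKING_P the header
     is empty, get_instance is not compiled in, and get_data is effectively
     an identity conversion.  */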

  /* Align X to 8.  */
  static inline size_t
  align_eight (size_t x)
  {
    return (((x+7) >> 3) << 3);
  }
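
  /* For illustration only (not part of the original header): align_eight
     rounds up to the next multiple of 8, e.g. align_eight (1) == 8,
     align_eight (8) == 8 and align_eight (13) == 16; initialize () relies
     on this when padding the element and block header sizes.  */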

  const char *m_name;
  ALLOC_POOL_ID_TYPE m_id;
  size_t m_elts_per_block;

  /* These are the elements that have been allocated at least once
     and freed.  */
  allocation_pool_list *m_returned_free_list;

  /* These are the elements that have not yet been allocated out of
     the last block obtained from XNEWVEC.  */
  char* m_virgin_free_list;

  /* The number of elements in the virgin_free_list that can be
     allocated before needing another block.  */
  size_t m_virgin_elts_remaining;
  /* The number of elements that are allocated.  */
  size_t m_elts_allocated;
  /* The number of elements that are released.  */
  size_t m_elts_free;
  /* The number of allocated blocks.  */
  size_t m_blocks_allocated;
  /* List of blocks that are used to allocate new objects.  */
  allocation_pool_list *m_block_list;
  /* Size of a pool element in bytes, including its header and padding.  */
  size_t m_elt_size;
  /* Size in bytes that should be allocated for each element.  */
  size_t m_size;
  /* Flag indicating whether the pool allocator has been initialized.  */
  bool m_initialized;
  /* Memory allocation location.  */
  mem_location m_location;
};

template <typename TBlockAllocator>
inline
base_pool_allocator <TBlockAllocator>::base_pool_allocator (
				const char *name, size_t size MEM_STAT_DECL):
  m_name (name), m_id (0), m_elts_per_block (0), m_returned_free_list (NULL),
  m_virgin_free_list (NULL), m_virgin_elts_remaining (0), m_elts_allocated (0),
  m_elts_free (0), m_blocks_allocated (0), m_block_list (NULL), m_elt_size (0),
  m_size (size), m_initialized (false),
  m_location (ALLOC_POOL_ORIGIN, false PASS_MEM_STAT) {}

/* Initialize a pool allocator.  */

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::initialize ()
{
  gcc_checking_assert (!m_initialized);
  m_initialized = true;

  size_t size = m_size;

  gcc_checking_assert (m_name);

  /* Make size large enough to store the list header.  */
  if (size < sizeof (allocation_pool_list*))
    size = sizeof (allocation_pool_list*);

  /* Now align the size to a multiple of 8.  */
  size = align_eight (size);

  /* Add the aligned size of ID.  */
  size += offsetof (allocation_object, u.data);

  m_elt_size = size;

  if (GATHER_STATISTICS)
    {
      pool_usage *u = pool_allocator_usage.register_descriptor
	(this, new mem_location (m_location));

      u->m_element_size = m_elt_size;
      u->m_pool_name = m_name;
    }

  /* List header size should be a multiple of 8.  */
  size_t header_size = align_eight (sizeof (allocation_pool_list));

  m_elts_per_block = (TBlockAllocator::block_size - header_size) / size;
  gcc_checking_assert (m_elts_per_block != 0);

  /* Increase the last used ID and use it for this pool.
     ID == 0 is used for free elements of the pool, so skip it.  */
  last_id++;
  if (last_id == 0)
    last_id++;

  m_id = last_id;
}
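
/* Worked example (illustrative, not part of the original header): assuming
   an LP64 host with CHECKING_P and the 64 KB memory_block_pool block size,
   a pool created with size 20 is padded to 24 bytes (a multiple of 8) and
   gains the 8-byte allocation_object header, giving m_elt_size == 32.  With
   an 8-byte block list header, m_elts_per_block == (65536 - 8) / 32 == 2047.  */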

/* Free all memory allocated for the given memory pool.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release ()
{
  if (!m_initialized)
    return;

  allocation_pool_list *block, *next_block;

  /* Free each block allocated to the pool.  */
  for (block = m_block_list; block != NULL; block = next_block)
    {
      next_block = block->next;
      TBlockAllocator::release (block);
    }

  if (GATHER_STATISTICS && !after_memory_report)
    {
      pool_allocator_usage.release_instance_overhead
	(this, (m_elts_allocated - m_elts_free) * m_elt_size);
    }

  m_returned_free_list = NULL;
  m_virgin_free_list = NULL;
  m_virgin_elts_remaining = 0;
  m_elts_allocated = 0;
  m_elts_free = 0;
  m_blocks_allocated = 0;
  m_block_list = NULL;
}

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release_if_empty ()
{
  if (m_elts_free == m_elts_allocated)
    release ();
}

template <typename TBlockAllocator>
inline base_pool_allocator <TBlockAllocator>::~base_pool_allocator ()
{
  release ();
}

/* Allocates one element from the pool specified.  */
template <typename TBlockAllocator>
inline void*
base_pool_allocator <TBlockAllocator>::allocate ()
{
  if (!m_initialized)
    initialize ();

  allocation_pool_list *header;
#ifdef ENABLE_VALGRIND_ANNOTATIONS
  int size;
#endif

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.register_instance_overhead (m_elt_size, this);
    }

#ifdef ENABLE_VALGRIND_ANNOTATIONS
  size = m_elt_size - offsetof (allocation_object, u.data);
#endif

  /* If there are no more free elements, make some more!  */
  if (!m_returned_free_list)
    {
      char *block;
      if (!m_virgin_elts_remaining)
	{
	  allocation_pool_list *block_header;

	  /* Make the block.  */
	  block = reinterpret_cast<char *> (TBlockAllocator::allocate ());
	  block_header = new (block) allocation_pool_list;
	  block += align_eight (sizeof (allocation_pool_list));

	  /* Throw it on the block list.  */
	  block_header->next = m_block_list;
	  m_block_list = block_header;

	  /* Make the block available for allocation.  */
	  m_virgin_free_list = block;
	  m_virgin_elts_remaining = m_elts_per_block;

	  /* Also update the number of elements we have free/allocated, and
	     increment the allocated block count.  */
	  m_elts_allocated += m_elts_per_block;
	  m_elts_free += m_elts_per_block;
	  m_blocks_allocated += 1;
	}

      /* We now know that we can take the first elt off the virgin list and
	 put it on the returned list.  */
      block = m_virgin_free_list;
      header = (allocation_pool_list*) allocation_object::get_data (block);
      header->next = NULL;

      /* Mark the element as free.  */
#if CHECKING_P
      ((allocation_object*) block)->id = 0;
#endif
      VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (header, size));
      m_returned_free_list = header;
      m_virgin_free_list += m_elt_size;
      m_virgin_elts_remaining--;
    }

  /* Pull the first free element from the free list, and return it.  */
  header = m_returned_free_list;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_DEFINED (header, sizeof (*header)));
  m_returned_free_list = header->next;
  m_elts_free--;

  /* Set the ID for the element.  */
#if CHECKING_P
  allocation_object::get_instance (header)->id = m_id;
#endif
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_UNDEFINED (header, size));

  return (void *)(header);
}

/* Puts OBJECT back on the pool's free list.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::remove (void *object)
{
  int size = m_elt_size - offsetof (allocation_object, u.data);

  if (flag_checking)
    {
      gcc_assert (m_initialized);
      gcc_assert (object
		  /* Check if we free more than we allocated.  */
		  && m_elts_free < m_elts_allocated);
#if CHECKING_P
      /* Check whether OBJECT was allocated from this pool.  */
      gcc_assert (m_id == allocation_object::get_instance (object)->id);
#endif

      memset (object, 0xaf, size);
    }

#if CHECKING_P
  /* Mark the element as free.  */
  allocation_object::get_instance (object)->id = 0;
#endif

  allocation_pool_list *header = new (object) allocation_pool_list;
  header->next = m_returned_free_list;
  m_returned_free_list = header;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (object, size));
  m_elts_free++;

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead (this, m_elt_size);
    }
}

/* Number of elements currently active (not returned to pool).  Used for cheap
   consistency checks.  */
template <typename TBlockAllocator>
inline size_t
base_pool_allocator <TBlockAllocator>::num_elts_current ()
{
  return m_elts_allocated - m_elts_free;
}

/* Specialization of base_pool_allocator which should be used in most cases.
   Another specialization may be needed if the object size is greater than
   memory_block_pool::block_size (64 KB).  */
typedef base_pool_allocator <memory_block_pool> pool_allocator;
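
/* A minimal usage sketch (illustrative only; the pool and element names
   below are hypothetical and do not appear elsewhere in GCC):

	struct edge_info { int src, dst; };
	static pool_allocator edge_info_pool ("edge info", sizeof (edge_info));

	edge_info *e = static_cast<edge_info *> (edge_info_pool.allocate ());
	e->src = 0;
	e->dst = 1;
	...
	edge_info_pool.remove (e);

   Removed objects are recycled through the pool's free list; all blocks go
   back to the block allocator when release () or the destructor runs.  */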

/* Type-based memory pool allocator.  */
template <typename T>
class object_allocator
{
public:
  /* Default constructor for a pool allocator called NAME.  */
  object_allocator (const char *name CXX_MEM_STAT_INFO):
    m_allocator (name, sizeof (T) PASS_MEM_STAT) {}

  inline void
  release ()
  {
    m_allocator.release ();
  }

  inline void release_if_empty ()
  {
    m_allocator.release_if_empty ();
  }

  /* Allocate memory for an instance of type T and call its default
     constructor.  */

  inline T *
  allocate () ATTRIBUTE_MALLOC
  {
    return ::new (m_allocator.allocate ()) T;
  }

  /* Allocate memory for an instance of type T and return a void * that
     can be used in situations where a default constructor is not provided
     by the class T.  */

  inline void *
  allocate_raw () ATTRIBUTE_MALLOC
  {
    return m_allocator.allocate ();
  }

  inline void
  remove (T *object)
  {
    /* Call the destructor.  */
    object->~T ();

    m_allocator.remove (object);
  }

  inline size_t
  num_elts_current ()
  {
    return m_allocator.num_elts_current ();
  }

private:
  pool_allocator m_allocator;
};
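
/* A usage sketch for object_allocator (illustrative only; the type and
   variable names are hypothetical):

	struct my_node { my_node (): next (NULL) {} my_node *next; };
	static object_allocator<my_node> node_pool ("my nodes");

	my_node *n = node_pool.allocate ();
	...
	node_pool.remove (n);

   allocate () placement-constructs a default-initialized T and remove ()
   runs the destructor before recycling the slot, so T must be default
   constructible here; for types that are not, see allocate_raw () and the
   operator new overload below.  */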

/* Store information about each particular alloc_pool.  Note that this
   will underestimate the amount of storage used by a small amount:
   1) The overhead in a pool is not accounted for.
   2) The unallocated elements in a block are not accounted for.  Note
   that this can in the worst case be one element smaller than the block
   size for that pool.  */
struct alloc_pool_descriptor
{
  /* Number of pools allocated.  */
  unsigned long created;
  /* Gross allocated storage.  */
  unsigned long allocated;
  /* Amount of currently active storage.  */
  unsigned long current;
  /* Peak amount of storage used.  */
  unsigned long peak;
  /* Size of an element in the pool.  */
  int elt_size;
};

/* Helper for classes that do not provide a default ctor.  */

template <typename T>
inline void *
operator new (size_t, object_allocator<T> &a)
{
  return a.allocate_raw ();
}
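
/* Illustrative use of the helper above for a type without a default
   constructor (the names are hypothetical):

	struct my_range { my_range (int l, int h): lo (l), hi (h) {}
			  int lo, hi; };
	static object_allocator<my_range> range_pool ("my ranges");

	my_range *r = new (range_pool) my_range (1, 10);
	...
	range_pool.remove (r);

   The placement form grabs raw storage via allocate_raw () and constructs
   in place; remove () runs ~my_range and recycles the slot.  */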

/* Hashtable mapping alloc_pool names to descriptors.  */
extern hash_map<const char *, alloc_pool_descriptor> *alloc_pool_hash;


#endif