/* Functions to support a pool of allocatable objects
   Copyright (C) 1997-2019 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@cgsoftware.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef ALLOC_POOL_H
#define ALLOC_POOL_H

#include "memory-block.h"
#include "options.h"	    // for flag_checking

extern void dump_alloc_pool_statistics (void);
/* Flag indicating that the memory report has been dumped and memory
   statistics are no longer gathered.  */
extern bool after_memory_report;

typedef unsigned long ALLOC_POOL_ID_TYPE;

/* Last used ID.  */
extern ALLOC_POOL_ID_TYPE last_id;

/* Pool allocator memory usage.  */
struct pool_usage: public mem_usage
{
  /* Default constructor.  */
  pool_usage (): m_element_size (0), m_pool_name ("") {}
  /* Constructor.  */
  pool_usage (size_t allocated, size_t times, size_t peak,
	      size_t instances, size_t element_size,
	      const char *pool_name)
    : mem_usage (allocated, times, peak, instances),
      m_element_size (element_size),
      m_pool_name (pool_name) {}

  /* Sum the usage with SECOND usage.  */
  pool_usage
  operator+ (const pool_usage &second)
  {
    return pool_usage (m_allocated + second.m_allocated,
		       m_times + second.m_times,
		       m_peak + second.m_peak,
		       m_instances + second.m_instances,
		       m_element_size, m_pool_name);
  }

  /* Dump usage coupled to LOC location, where TOTAL is the sum of all
     rows.  */
  inline void
  dump (mem_location *loc, mem_usage &total) const
  {
    char *location_string = loc->to_string ();

    fprintf (stderr, "%-32s%-48s " PRsa(5) PRsa(9) ":%5.1f%%"
	     PRsa(9) PRsa(9) ":%5.1f%%%12" PRIu64 "\n",
	     m_pool_name, location_string,
	     SIZE_AMOUNT (m_instances),
	     SIZE_AMOUNT (m_allocated),
	     get_percent (m_allocated, total.m_allocated),
	     SIZE_AMOUNT (m_peak),
	     SIZE_AMOUNT (m_times),
	     get_percent (m_times, total.m_times),
	     (uint64_t)m_element_size);

    free (location_string);
  }

  /* Dump header with NAME.  */
  static inline void
  dump_header (const char *name)
  {
    fprintf (stderr, "%-32s%-48s %6s%11s%16s%17s%12s\n", "Pool name", name,
	     "Pools", "Leak", "Peak", "Times", "Elt size");
  }

  /* Dump footer.  */
  inline void
  dump_footer ()
  {
    fprintf (stderr, "%s" PRsa(82) PRsa(10) "\n", "Total",
	     SIZE_AMOUNT (m_instances), SIZE_AMOUNT (m_allocated));
  }

  /* Element size.  */
  size_t m_element_size;
  /* Pool name.  */
  const char *m_pool_name;
};

extern mem_alloc_description<pool_usage> pool_allocator_usage;

#if 0
/* If a pool with custom block size is needed, one might use the following
   template.  An instance of this template can be used as a parameter for
   instantiating base_pool_allocator template:

	typedef custom_block_allocator <128*1024> huge_block_allocator;
	...
	static base_pool_allocator <huge_block_allocator>
						value_pool ("value", 16384);

   Right now it's not used anywhere in the code, and is given here as an
   example.  */

template <size_t BlockSize>
class custom_block_allocator
{
public:
  static const size_t block_size = BlockSize;

  static inline void *
  allocate () ATTRIBUTE_MALLOC
  {
    return XNEWVEC (char, BlockSize);
  }

  static inline void
  release (void *block)
  {
    XDELETEVEC (block);
  }
};
#endif

/* Generic pool allocator.  */

template <typename TBlockAllocator>
class base_pool_allocator
{
public:
  /* Constructor for a pool allocator called NAME.  */
  base_pool_allocator (const char *name, size_t size CXX_MEM_STAT_INFO);
  ~base_pool_allocator ();
  void release ();
  void release_if_empty ();
  void *allocate () ATTRIBUTE_MALLOC;
  void remove (void *object);
  size_t num_elts_current ();

private:
  struct allocation_pool_list
  {
    allocation_pool_list *next;
  };

  /* Initialize a pool allocator.  */
  void initialize ();

  struct allocation_object
  {
#if CHECKING_P
    /* The ID of the alloc pool from which the object was allocated.  */
    ALLOC_POOL_ID_TYPE id;
#endif

    union
      {
	/* The data of the object.  */
	char data[1];

	/* Because we want any type of data to be well aligned after the ID,
	   the following elements are here.  They are never accessed so
	   the allocated object may be even smaller than this structure.
	   We do not care about alignment for floating-point types.  */
	char *align_p;
	int64_t align_i;
      } u;

#if CHECKING_P
    static inline allocation_object*
    get_instance (void *data_ptr)
    {
      return (allocation_object *)(((char *)(data_ptr))
				      - offsetof (allocation_object,
						  u.data));
    }
#endif

    static inline void*
    get_data (void *instance_ptr)
    {
      return (void*)(((allocation_object *) instance_ptr)->u.data);
    }
  };

  /* Align X to 8.  */
  static inline size_t
  align_eight (size_t x)
  {
    return (((x+7) >> 3) << 3);
  }

  const char *m_name;
  ALLOC_POOL_ID_TYPE m_id;
  size_t m_elts_per_block;

  /* These are the elements that have been allocated at least once
     and freed.  */
  allocation_pool_list *m_returned_free_list;

  /* These are the elements that have not yet been allocated out of
     the last block obtained from XNEWVEC.  */
  char* m_virgin_free_list;

  /* The number of elements in the virgin_free_list that can be
     allocated before needing another block.  */
  size_t m_virgin_elts_remaining;
  /* The number of elements that are allocated.  */
  size_t m_elts_allocated;
  /* The number of elements that are released.  */
  size_t m_elts_free;
  /* The number of allocated blocks.  */
  size_t m_blocks_allocated;
  /* List of blocks that are used to allocate new objects.  */
  allocation_pool_list *m_block_list;
  /* Size of a pool element in bytes.  */
  size_t m_elt_size;
  /* Size in bytes that should be allocated for each element.  */
  size_t m_size;
  /* Flag indicating whether the pool allocator has been initialized.  */
  bool m_initialized;
  /* Memory allocation location.  */
  mem_location m_location;
};

template <typename TBlockAllocator>
inline
base_pool_allocator <TBlockAllocator>::base_pool_allocator (
				const char *name, size_t size MEM_STAT_DECL):
  m_name (name), m_id (0), m_elts_per_block (0), m_returned_free_list (NULL),
  m_virgin_free_list (NULL), m_virgin_elts_remaining (0), m_elts_allocated (0),
  m_elts_free (0), m_blocks_allocated (0), m_block_list (NULL), m_elt_size (0),
  m_size (size), m_initialized (false),
  m_location (ALLOC_POOL_ORIGIN, false PASS_MEM_STAT) {}

/* Initialize a pool allocator.  */

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::initialize ()
{
  gcc_checking_assert (!m_initialized);
  m_initialized = true;

  size_t size = m_size;

  gcc_checking_assert (m_name);
  gcc_checking_assert (m_size);

  /* Make size large enough to store the list header.  */
  if (size < sizeof (allocation_pool_list*))
    size = sizeof (allocation_pool_list*);

  /* Now align the size to a multiple of 8.  */
  size = align_eight (size);

  /* Add the aligned size of ID.  */
  size += offsetof (allocation_object, u.data);

  m_elt_size = size;
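
  /* As a worked example (a sketch only, assuming a typical 64-bit host with
     CHECKING_P enabled, where the ID field plus the union's 8-byte alignment
     make offsetof (allocation_object, u.data) equal 8): a pool created with
     size == 20 is first rounded up to 24 by align_eight and ends up with an
     m_elt_size of 32.  With checking disabled the offset is 0, so m_elt_size
     would be 24.  */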

  if (GATHER_STATISTICS)
    {
      pool_usage *u = pool_allocator_usage.register_descriptor
	(this, new mem_location (m_location));

      u->m_element_size = m_elt_size;
      u->m_pool_name = m_name;
    }

  /* List header size should be a multiple of 8.  */
  size_t header_size = align_eight (sizeof (allocation_pool_list));

  m_elts_per_block = (TBlockAllocator::block_size - header_size) / size;
  gcc_checking_assert (m_elts_per_block != 0);

  /* Increase the last used ID and use it for this pool.
     ID == 0 is used for free elements of the pool, so skip it.  */
  last_id++;
  if (last_id == 0)
    last_id++;

  m_id = last_id;
}

/* Free all memory allocated for the given memory pool.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release ()
{
  if (!m_initialized)
    return;

  allocation_pool_list *block, *next_block;

  /* Free each block allocated to the pool.  */
  for (block = m_block_list; block != NULL; block = next_block)
    {
      next_block = block->next;
      TBlockAllocator::release (block);
    }

  if (GATHER_STATISTICS && !after_memory_report)
    {
      pool_allocator_usage.release_instance_overhead
	(this, (m_elts_allocated - m_elts_free) * m_elt_size);
    }

  m_returned_free_list = NULL;
  m_virgin_free_list = NULL;
  m_virgin_elts_remaining = 0;
  m_elts_allocated = 0;
  m_elts_free = 0;
  m_blocks_allocated = 0;
  m_block_list = NULL;
}

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release_if_empty ()
{
  if (m_elts_free == m_elts_allocated)
    release ();
}

template <typename TBlockAllocator>
inline base_pool_allocator <TBlockAllocator>::~base_pool_allocator ()
{
  release ();
}

/* Allocates one element from the pool specified.  */
template <typename TBlockAllocator>
inline void*
base_pool_allocator <TBlockAllocator>::allocate ()
{
  if (!m_initialized)
    initialize ();

  allocation_pool_list *header;
#ifdef ENABLE_VALGRIND_ANNOTATIONS
  int size;
#endif

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.register_instance_overhead (m_elt_size, this);
    }

#ifdef ENABLE_VALGRIND_ANNOTATIONS
  size = m_elt_size - offsetof (allocation_object, u.data);
#endif

  /* If there are no more free elements, make some more.  */
  if (!m_returned_free_list)
    {
      char *block;
      if (!m_virgin_elts_remaining)
	{
	  allocation_pool_list *block_header;

	  /* Make the block.  */
	  block = reinterpret_cast<char *> (TBlockAllocator::allocate ());
	  block_header = new (block) allocation_pool_list;
	  block += align_eight (sizeof (allocation_pool_list));

	  /* Throw it on the block list.  */
	  block_header->next = m_block_list;
	  m_block_list = block_header;

	  /* Make the block available for allocation.  */
	  m_virgin_free_list = block;
	  m_virgin_elts_remaining = m_elts_per_block;

	  /* Also update the number of elements we have free/allocated, and
	     increment the allocated block count.  */
	  m_elts_allocated += m_elts_per_block;
	  m_elts_free += m_elts_per_block;
	  m_blocks_allocated += 1;
	}

      /* We now know that we can take the first elt off the virgin list and
	 put it on the returned list.  */
      block = m_virgin_free_list;
      header = (allocation_pool_list*) allocation_object::get_data (block);
      header->next = NULL;

      /* Mark the element as free.  */
#if CHECKING_P
      ((allocation_object*) block)->id = 0;
#endif
      VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (header, size));
      m_returned_free_list = header;
      m_virgin_free_list += m_elt_size;
      m_virgin_elts_remaining--;
    }

  /* Pull the first free element from the free list, and return it.  */
  header = m_returned_free_list;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_DEFINED (header, sizeof (*header)));
  m_returned_free_list = header->next;
  m_elts_free--;

  /* Set the ID for the element.  */
#if CHECKING_P
  allocation_object::get_instance (header)->id = m_id;
#endif
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_UNDEFINED (header, size));

  return (void *)(header);
}

/* Puts OBJECT back on the pool's free list.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::remove (void *object)
{
  int size = m_elt_size - offsetof (allocation_object, u.data);

  if (flag_checking)
    {
      gcc_assert (m_initialized);
      gcc_assert (object
		  /* Check if we free more than we allocated.  */
		  && m_elts_free < m_elts_allocated);
#if CHECKING_P
      /* Check whether OBJECT was allocated from this pool.  */
      gcc_assert (m_id == allocation_object::get_instance (object)->id);
#endif

      memset (object, 0xaf, size);
    }

#if CHECKING_P
  /* Mark the element as free.  */
  allocation_object::get_instance (object)->id = 0;
#endif

  allocation_pool_list *header = new (object) allocation_pool_list;
  header->next = m_returned_free_list;
  m_returned_free_list = header;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (object, size));
  m_elts_free++;

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead (this, m_elt_size);
    }
}
463 
464 /* Number of elements currently active (not returned to pool).  Used for cheap
465    consistency checks.  */
466 template <typename TBlockAllocator>
467 inline size_t
num_elts_current()468 base_pool_allocator <TBlockAllocator>::num_elts_current ()
469 {
470   return m_elts_allocated - m_elts_free;
471 }

/* Specialization of base_pool_allocator which should be used in most cases.
   Another specialization may be needed, if object size is greater than
   memory_block_pool::block_size (64 KB).  */
typedef base_pool_allocator <memory_block_pool> pool_allocator;
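
/* A minimal usage sketch (the pool name and element size below are
   hypothetical, mirroring the custom_block_allocator example above):

	static pool_allocator my_elt_pool ("my elements", 32);
	...
	void *elt = my_elt_pool.allocate ();
	...
	my_elt_pool.remove (elt);

   Objects handed out by allocate () are raw, fixed-size chunks carved out of
   64 KB blocks obtained from memory_block_pool; remove () puts a chunk back
   on the pool's free list without returning any block to the system.  */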

/* Type based memory pool allocator.  */
template <typename T>
class object_allocator
{
public:
  /* Constructor for a pool allocator called NAME.  */
  object_allocator (const char *name CXX_MEM_STAT_INFO):
    m_allocator (name, sizeof (T) PASS_MEM_STAT) {}

  inline void
  release ()
  {
    m_allocator.release ();
  }

  inline void release_if_empty ()
  {
    m_allocator.release_if_empty ();
  }


  /* Allocate memory for an instance of type T and call its default
     constructor.  */

  inline T *
  allocate () ATTRIBUTE_MALLOC
  {
    return ::new (m_allocator.allocate ()) T;
  }

  /* Allocate memory for an instance of type T and return a void * that
     can be used in situations where class T does not provide a default
     constructor.  */

  inline void *
  allocate_raw () ATTRIBUTE_MALLOC
  {
    return m_allocator.allocate ();
  }

  inline void
  remove (T *object)
  {
    /* Call the destructor.  */
    object->~T ();

    m_allocator.remove (object);
  }

  inline size_t
  num_elts_current ()
  {
    return m_allocator.num_elts_current ();
  }

private:
  pool_allocator m_allocator;
};
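
/* For instance (a sketch only; the type and pool name below are
   hypothetical):

	struct search_entry { int key; int value; };
	static object_allocator<search_entry> entry_pool ("search entries");
	...
	search_entry *e = entry_pool.allocate ();
	...
	entry_pool.remove (e);

   allocate () placement-constructs a default-initialized T inside pool
   memory, and remove () runs ~T () before handing the slot back to the
   underlying pool_allocator.  */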

/* Store information about each particular alloc_pool.  Note that this
   will underestimate the amount of storage used by a small amount:
   1) The overhead in a pool is not accounted for.
   2) The unallocated elements in a block are not accounted for.  Note
   that this can at worst be one element smaller than the block
   size for that pool.  */
struct alloc_pool_descriptor
{
  /* Number of pools allocated.  */
  unsigned long created;
  /* Gross allocated storage.  */
  unsigned long allocated;
  /* Amount of currently active storage.  */
  unsigned long current;
  /* Peak amount of storage used.  */
  unsigned long peak;
  /* Size of element in the pool.  */
  int elt_size;
};

/* Helper for classes that do not provide default ctor.  */

template <typename T>
inline void *
operator new (size_t, object_allocator<T> &a)
{
  return a.allocate_raw ();
}
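
/* For example, a type without a default constructor can be constructed in
   pool memory via placement new (a sketch; the type and pool name below are
   hypothetical):

	struct range { range (int lo, int hi) : m_lo (lo), m_hi (hi) {}
		       int m_lo, m_hi; };
	static object_allocator<range> range_pool ("ranges");
	...
	range *r = new (range_pool) range (0, 7);
	...
	range_pool.remove (r);

   The placement form calls allocate_raw () to get uninitialized pool memory
   and then runs the chosen constructor in place; remove () still invokes the
   destructor before recycling the slot.  */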

/* Hashtable mapping alloc_pool names to descriptors.  */
extern hash_map<const char *, alloc_pool_descriptor> *alloc_pool_hash;


#endif