// -*- C++ -*- Allocate exception objects.
// Copyright (C) 2001-2016 Free Software Foundation, Inc.
//
// This file is part of GCC.
//
// GCC is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// GCC is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

// This is derived from the C++ ABI for IA-64.  Places where we diverge
// for cross-architecture compatibility are noted with "@@@".

#include <bits/c++config.h>
#include <cstdlib>
#if _GLIBCXX_HOSTED
#include <cstring>
#endif
#include <climits>
#include <exception>
#include "unwind-cxx.h"
#include <ext/concurrence.h>
#include <new>

#if _GLIBCXX_HOSTED
using std::free;
using std::malloc;
using std::memset;
#else
// In a freestanding environment, these functions may not be available
// -- but for now, we assume that they are.
extern "C" void *malloc (std::size_t);
extern "C" void free(void *);
extern "C" void *memset (void *, int, std::size_t);
#endif

using namespace __cxxabiv1;

// ??? How to control these parameters.

// Guess from the size of basic types how large a buffer is reasonable.
// Note that the basic C++ exception header has 13 pointers and 2 ints,
// so on a system with PSImode pointers we're talking about 56 bytes
// just for overhead.
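// As a rough worked example (the exact figures depend on the target's type
// sizes): with 4-byte pointers and 2-byte ints the header costs
// 13 * 4 + 2 * 2 = 56 bytes, which is where the number above comes from.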

#if INT_MAX == 32767
# define EMERGENCY_OBJ_SIZE	128
# define EMERGENCY_OBJ_COUNT	16
#elif !defined (_GLIBCXX_LLP64) && LONG_MAX == 2147483647
# define EMERGENCY_OBJ_SIZE	512
# define EMERGENCY_OBJ_COUNT	32
#else
# define EMERGENCY_OBJ_SIZE	1024
# define EMERGENCY_OBJ_COUNT	64
#endif

#ifndef __GTHREADS
# undef EMERGENCY_OBJ_COUNT
# define EMERGENCY_OBJ_COUNT	4
#endif
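
// These values feed the arena size computed in pool::pool below: roughly
// EMERGENCY_OBJ_COUNT objects of EMERGENCY_OBJ_SIZE bytes each, plus one
// __cxa_dependent_exception header per object.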

namespace __gnu_cxx
{
  void __freeres();
}

namespace
{
  // A fixed-size heap from which variable-size objects are allocated.
  class pool
    {
    public:
      pool();

      void *allocate (std::size_t);
      void free (void *);

      bool in_pool (void *);

    private:
      struct free_entry {
        std::size_t size;
        free_entry *next;
      };
      struct allocated_entry {
        std::size_t size;
        char data[] __attribute__((aligned));
      };

      // A single mutex controlling emergency allocations.
      __gnu_cxx::__mutex emergency_mutex;

      // The free-list
      free_entry *first_free_entry;
      // The arena itself - we need to keep track of these only
      // to implement in_pool.
      char *arena;
      std::size_t arena_size;

      friend void __gnu_cxx::__freeres();
    };

  pool::pool()
    {
      // Allocate the arena - we could add a GLIBCXX_EH_ARENA_SIZE environment
      // variable to make this tunable.
      arena_size = (EMERGENCY_OBJ_SIZE * EMERGENCY_OBJ_COUNT
                    + EMERGENCY_OBJ_COUNT * sizeof (__cxa_dependent_exception));
      arena = (char *)malloc (arena_size);
      if (!arena)
        {
          // If the allocation failed go without an emergency pool.
          arena_size = 0;
          first_free_entry = NULL;
          return;
        }

      // Populate the free-list with a single entry covering the whole arena.
      first_free_entry = reinterpret_cast <free_entry *> (arena);
      new (first_free_entry) free_entry;
      first_free_entry->size = arena_size;
      first_free_entry->next = NULL;
    }

  void *pool::allocate (std::size_t size)
    {
      __gnu_cxx::__scoped_lock sentry(emergency_mutex);
      // We need an additional size_t member plus the padding to
      // ensure proper alignment of data.
      size += offsetof (allocated_entry, data);
      // And we need to at least hand out objects of the size of
      // a freelist entry.
      if (size < sizeof (free_entry))
        size = sizeof (free_entry);
      // And we need to align objects we hand out to the maximum
      // alignment required on the target (this really aligns the
      // tail which will become a new freelist entry).
      size = ((size + __alignof__ (allocated_entry::data) - 1)
              & ~(__alignof__ (allocated_entry::data) - 1));
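      // For illustration only (the actual values are target-dependent): if
      // offsetof (allocated_entry, data) and the alignment are both 16, a
      // 24-byte request becomes 24 + 16 = 40 bytes, which is then rounded
      // up to the next multiple of 16, i.e. 48 bytes.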
      // Search for an entry of proper size on the freelist.
      free_entry **e;
      for (e = &first_free_entry;
           *e && (*e)->size < size;
           e = &(*e)->next)
        ;
      if (!*e)
        return NULL;
      allocated_entry *x;
      if ((*e)->size - size >= sizeof (free_entry))
        {
          // Split the block if it is too large.
          free_entry *f = reinterpret_cast <free_entry *>
              (reinterpret_cast <char *> (*e) + size);
          std::size_t sz = (*e)->size;
          free_entry *next = (*e)->next;
          new (f) free_entry;
          f->next = next;
          f->size = sz - size;
          x = reinterpret_cast <allocated_entry *> (*e);
          new (x) allocated_entry;
          x->size = size;
          *e = f;
        }
      else
        {
          // Exact size match, or the leftover is too small for a free entry.
          std::size_t sz = (*e)->size;
          free_entry *next = (*e)->next;
          x = reinterpret_cast <allocated_entry *> (*e);
          new (x) allocated_entry;
          x->size = sz;
          *e = next;
        }
      return &x->data;
    }

  void pool::free (void *data)
    {
      __gnu_cxx::__scoped_lock sentry(emergency_mutex);
      allocated_entry *e = reinterpret_cast <allocated_entry *>
        (reinterpret_cast <char *> (data) - offsetof (allocated_entry, data));
      std::size_t sz = e->size;
      if (!first_free_entry)
        {
          // If the free list is empty just put the entry there.
          free_entry *f = reinterpret_cast <free_entry *> (e);
          new (f) free_entry;
          f->size = sz;
          f->next = NULL;
          first_free_entry = f;
        }
      else if (reinterpret_cast <char *> (e) + sz
               == reinterpret_cast <char *> (first_free_entry))
        {
          // The first free entry follows immediately after us, so merge
          // with it.
          free_entry *f = reinterpret_cast <free_entry *> (e);
          new (f) free_entry;
          f->size = sz + first_free_entry->size;
          f->next = first_free_entry->next;
          first_free_entry = f;
        }
      else
        {
          // Else search for a free item we can merge with at its end.
          free_entry **fe;
          for (fe = &first_free_entry;
               (*fe)->next
               && (reinterpret_cast <char *> ((*fe)->next)
                   > reinterpret_cast <char *> (e) + sz);
               fe = &(*fe)->next)
            ;
          if (reinterpret_cast <char *> (*fe) + (*fe)->size
              == reinterpret_cast <char *> (e))
            /* Merge with the freelist entry.  */
            (*fe)->size += sz;
          else
            {
              // Otherwise put it after that entry, which keeps the freelist
              // sorted.
              free_entry *f = reinterpret_cast <free_entry *> (e);
              new (f) free_entry;
              f->size = sz;
              f->next = (*fe)->next;
              (*fe)->next = f;
            }
        }
    }

  bool pool::in_pool (void *ptr)
    {
      char *p = reinterpret_cast <char *> (ptr);
      return (p > arena
              && p < arena + arena_size);
    }

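  // The single emergency pool instance.  Its constructor (and therefore the
  // malloc of the arena) runs during static initialization of this
  // translation unit.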
  pool emergency_pool;
}

namespace __gnu_cxx
{
  void
  __freeres()
  {
    if (emergency_pool.arena)
      {
        ::free(emergency_pool.arena);
        emergency_pool.arena = 0;
      }
  }
}

extern "C" void *
__cxxabiv1::__cxa_allocate_exception(std::size_t thrown_size) _GLIBCXX_NOTHROW
{
  void *ret;

  thrown_size += sizeof (__cxa_refcounted_exception);
  ret = malloc (thrown_size);

  if (!ret)
    ret = emergency_pool.allocate (thrown_size);

  if (!ret)
    std::terminate ();

  memset (ret, 0, sizeof (__cxa_refcounted_exception));

  return (void *)((char *)ret + sizeof (__cxa_refcounted_exception));
}
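
// A rough sketch of how compiler-generated code drives this routine when
// lowering a throw expression under the Itanium C++ ABI (illustrative only;
// the real lowering is emitted by the compiler, not written by hand):
//
//   void *obj = __cxa_allocate_exception (sizeof (E));
//   new (obj) E (...);                     // construct the exception object
//   __cxa_throw (obj, &typeid (E), dtor);  // dtor destroys E; never returns
//
// Note that the pointer handed back points just past the
// __cxa_refcounted_exception header that this function zero-initializes.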


extern "C" void
__cxxabiv1::__cxa_free_exception(void *vptr) _GLIBCXX_NOTHROW
{
  char *ptr = (char *) vptr - sizeof (__cxa_refcounted_exception);
  if (emergency_pool.in_pool (ptr))
    emergency_pool.free (ptr);
  else
    free (ptr);
}


extern "C" __cxa_dependent_exception*
__cxxabiv1::__cxa_allocate_dependent_exception() _GLIBCXX_NOTHROW
{
  __cxa_dependent_exception *ret;

  ret = static_cast<__cxa_dependent_exception*>
    (malloc (sizeof (__cxa_dependent_exception)));

  if (!ret)
    ret = static_cast <__cxa_dependent_exception*>
      (emergency_pool.allocate (sizeof (__cxa_dependent_exception)));

  if (!ret)
    std::terminate ();

  memset (ret, 0, sizeof (__cxa_dependent_exception));

  return ret;
}
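
// Dependent exceptions back std::rethrow_exception: rather than copying the
// thrown object, a dependent exception refers to a primary exception and
// shares its reference count.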


extern "C" void
__cxxabiv1::__cxa_free_dependent_exception
  (__cxa_dependent_exception *vptr) _GLIBCXX_NOTHROW
{
  if (emergency_pool.in_pool (vptr))
    emergency_pool.free (vptr);
  else
    free (vptr);
}