1 /* Implements exception handling.
2    Copyright (C) 1989-2014 Free Software Foundation, Inc.
3    Contributed by Mike Stump <mrs@cygnus.com>.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 
22 /* An exception is an event that can be "thrown" from within a
23    function.  This event can then be "caught" by the callers of
24    the function.
25 
26    The representation of exceptions changes several times during
27    the compilation process:
28 
29    In the beginning, in the front end, we have the GENERIC trees
30    TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
31    CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
32 
33    During initial gimplification (gimplify.c) these are lowered
34    to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
35    The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
36    into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
37    conversion.
38 
39    During pass_lower_eh (tree-eh.c) we record the nested structure
40    of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
41    We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
42    regions at this time.  We can then flatten the statements within
43    the TRY nodes to straight-line code.  Statements that had been within
44    TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
45    so that we may remember what action is supposed to be taken if
46    a given statement does throw.  During this lowering process,
47    we create an EH_LANDING_PAD node for each EH_REGION that has
48    some code within the function that needs to be executed if a
49    throw does happen.  We also create RESX statements that are
50    used to transfer control from an inner EH_REGION to an outer
51    EH_REGION.  We also create EH_DISPATCH statements as placeholders
52    for a runtime type comparison that should be made in order to
53    select the action to perform among different CATCH and EH_FILTER
54    regions.
55 
56    During pass_lower_eh_dispatch (tree-eh.c), which is run after
57    all inlining is complete, we are able to run assign_filter_values,
58    which allows us to map the set of types manipulated by all of the
59    CATCH and EH_FILTER regions to a set of integers.  This set of integers
60    will be how the exception runtime communicates with the code generated
61    within the function.  We then expand the GIMPLE_EH_DISPATCH statements
62    to a switch or conditional branches that use the argument provided by
63    the runtime (__builtin_eh_filter) and the set of integers we computed
64    in assign_filter_values.
65 
66    During pass_lower_resx (tree-eh.c), which is run near the end
67    of optimization, we expand RESX statements.  If the eh region
68    that is outer to the RESX statement is a MUST_NOT_THROW, then
69    the RESX expands to some form of abort statement.  If the eh
70    region that is outer to the RESX statement is within the current
71    function, then the RESX expands to a bookkeeping call
72    (__builtin_eh_copy_values) and a goto.  Otherwise, the next
73    handler for the exception must be within a function somewhere
74    up the call chain, so we call back into the exception runtime
75    (__builtin_unwind_resume).
76 
77    During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
78    that create an rtl to eh_region mapping that corresponds to the
79    gimple to eh_region mapping that had been recorded in the
80    THROW_STMT_TABLE.
81 
82    Then, via finish_eh_generation, we generate the real landing pads
83    to which the runtime will actually transfer control.  These new
84    landing pads perform whatever bookkeeping is needed by the target
85    backend in order to resume execution within the current function.
86    Each of these new landing pads falls through into the post_landing_pad
87    label which had been used within the CFG up to this point.  All
88    exception edges within the CFG are redirected to the new landing pads.
89    If the target uses setjmp to implement exceptions, the various extra
90    calls into the runtime to register and unregister the current stack
91    frame are emitted at this time.
92 
93    During pass_convert_to_eh_region_ranges (except.c), we transform
94    the REG_EH_REGION notes attached to individual insns into
95    non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
96    and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
97    same associated action within the exception region tree, meaning
98    that (1) the exception is caught by the same landing pad within the
99    current function, (2) the exception is blocked by the runtime with
100    a MUST_NOT_THROW region, or (3) the exception is not handled at all
101    within the current function.
102 
103    Finally, during assembly generation, we call
104    output_function_exception_table (except.c) to emit the tables with
105    which the exception runtime can determine if a given stack frame
106    handles a given exception, and if so what filter value to provide
107    to the function when the non-local control transfer is effected.
108    If the target uses dwarf2 unwinding to implement exceptions, then
109    output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
110 
111 
112 #include "config.h"
113 #include "system.h"
114 #include "coretypes.h"
115 #include "tm.h"
116 #include "rtl.h"
117 #include "tree.h"
118 #include "stringpool.h"
119 #include "stor-layout.h"
120 #include "flags.h"
121 #include "function.h"
122 #include "expr.h"
123 #include "libfuncs.h"
124 #include "insn-config.h"
125 #include "except.h"
126 #include "hard-reg-set.h"
127 #include "output.h"
128 #include "dwarf2asm.h"
129 #include "dwarf2out.h"
130 #include "dwarf2.h"
131 #include "toplev.h"
132 #include "hash-table.h"
133 #include "intl.h"
134 #include "tm_p.h"
135 #include "target.h"
136 #include "common/common-target.h"
137 #include "langhooks.h"
138 #include "cgraph.h"
139 #include "diagnostic.h"
140 #include "tree-pretty-print.h"
141 #include "tree-pass.h"
142 #include "pointer-set.h"
143 #include "cfgloop.h"
144 
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* NOTE(review): not used in this chunk; appears to bias call-site
   numbering -- confirm against add_call_site.  */
static GTY(()) int call_site_base;
/* Map from a front-end type to its runtime representation; entries
   are TREE_LIST nodes pairing TREE_PURPOSE (the type) with
   TREE_VALUE (the runtime object) -- see add_type_for_runtime.  */
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
/* Byte offsets of the interesting SjLj_Function_Context fields,
   cached by init_eh for easy access from rtl.  */
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
162 
163 
/* One entry of the call-site table: the landing pad to transfer
   control to, and the associated action index (see the encoding
   described in the comment below).  */
struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};
169 
170 /* In the following structure and associated functions,
171    we represent entries in the action table as 1-based indices.
172    Special cases are:
173 
174 	 0:	null action record, non-null landing pad; implies cleanups
175 	-1:	null action record, null landing pad; implies no action
176 	-2:	no call-site entry; implies must_not_throw
177 	-3:	we have yet to process outer regions
178 
179    Further, no special cases apply to the "next" field of the record.
180    For next, 0 means end of list.  */
181 
struct action_record
{
  int offset;	/* NOTE(review): position of the record; not used in
		   this chunk -- confirm against the table emitters.  */
  int filter;	/* Type filter value; part of the hash identity.  */
  int next;	/* 1-based index of the next record; 0 ends the list.  */
};
188 
189 /* Hashtable helpers.  */
190 
/* Hash-table traits for action_record: identity is the (FILTER, NEXT)
   pair; entries are released with free() on table disposal.  */
struct action_record_hasher : typed_free_remove <action_record>
{
  typedef action_record value_type;
  typedef action_record compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
198 
199 inline hashval_t
hash(const value_type * entry)200 action_record_hasher::hash (const value_type *entry)
201 {
202   return entry->next * 1009 + entry->filter;
203 }
204 
205 inline bool
equal(const value_type * entry,const compare_type * data)206 action_record_hasher::equal (const value_type *entry, const compare_type *data)
207 {
208   return entry->filter == data->filter && entry->next == data->next;
209 }
210 
/* Hash table of action records, keyed per action_record_hasher.  */
typedef hash_table <action_record_hasher> action_hash_type;

static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

/* Callbacks for TYPE_TO_RUNTIME_MAP.  */
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type, eh_region);
static int add_call_site (rtx, int, int);

/* LEB128 encoders used when building the exception-table byte
   buffers.  */
static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);
232 
233 
/* One-time initialization of the exception-handling machinery.  Does
   nothing unless -fexceptions is enabled.  Creates the shared
   type-to-runtime-type hash table and, for setjmp/longjmp based
   unwinding, lays out the SjLj_Function_Context record type and
   caches its field offsets.  */

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      /* __prev: pointer to the chained previous context.  */
      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* __call_site: integer call-site index.  */
      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* __data: array of 4 unwind words.  */
      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* __personality: opaque pointer.  */
      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* __lsda: opaque pointer.  */
      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = size_int (5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}
332 
/* Per-function EH initialization: allocate CFUN's eh_status and
   reserve slot zero of both the region and landing-pad arrays, so
   index 0 can act as the "no region"/"no landing pad" value.  */

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared_eh_status ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
342 
343 /* Routines to generate the exception tree somewhat directly.
344    These are used from tree-eh.c when processing exception related
345    nodes during tree optimization.  */
346 
347 static eh_region
gen_eh_region(enum eh_region_type type,eh_region outer)348 gen_eh_region (enum eh_region_type type, eh_region outer)
349 {
350   eh_region new_eh;
351 
352   /* Insert a new blank region as a leaf in the tree.  */
353   new_eh = ggc_alloc_cleared_eh_region_d ();
354   new_eh->type = type;
355   new_eh->outer = outer;
356   if (outer)
357     {
358       new_eh->next_peer = outer->inner;
359       outer->inner = new_eh;
360     }
361   else
362     {
363       new_eh->next_peer = cfun->eh->region_tree;
364       cfun->eh->region_tree = new_eh;
365     }
366 
367   new_eh->index = vec_safe_length (cfun->eh->region_array);
368   vec_safe_push (cfun->eh->region_array, new_eh);
369 
370   /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
371   if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
372     new_eh->use_cxa_end_cleanup = true;
373 
374   return new_eh;
375 }
376 
/* Create a new ERT_CLEANUP region nested inside OUTER.  */

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}
382 
/* Create a new ERT_TRY region nested inside OUTER.  */

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
388 
389 eh_catch
gen_eh_region_catch(eh_region t,tree type_or_list)390 gen_eh_region_catch (eh_region t, tree type_or_list)
391 {
392   eh_catch c, l;
393   tree type_list, type_node;
394 
395   gcc_assert (t->type == ERT_TRY);
396 
397   /* Ensure to always end up with a type list to normalize further
398      processing, then register each type against the runtime types map.  */
399   type_list = type_or_list;
400   if (type_or_list)
401     {
402       if (TREE_CODE (type_or_list) != TREE_LIST)
403 	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
404 
405       type_node = type_list;
406       for (; type_node; type_node = TREE_CHAIN (type_node))
407 	add_type_for_runtime (TREE_VALUE (type_node));
408     }
409 
410   c = ggc_alloc_cleared_eh_catch_d ();
411   c->type_list = type_list;
412   l = t->u.eh_try.last_catch;
413   c->prev_catch = l;
414   if (l)
415     l->next_catch = c;
416   else
417     t->u.eh_try.first_catch = c;
418   t->u.eh_try.last_catch = c;
419 
420   return c;
421 }
422 
423 eh_region
gen_eh_region_allowed(eh_region outer,tree allowed)424 gen_eh_region_allowed (eh_region outer, tree allowed)
425 {
426   eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
427   region->u.allowed.type_list = allowed;
428 
429   for (; allowed ; allowed = TREE_CHAIN (allowed))
430     add_type_for_runtime (TREE_VALUE (allowed));
431 
432   return region;
433 }
434 
/* Create a new ERT_MUST_NOT_THROW region nested inside OUTER.  */

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
440 
441 eh_landing_pad
gen_eh_landing_pad(eh_region region)442 gen_eh_landing_pad (eh_region region)
443 {
444   eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d ();
445 
446   lp->next_lp = region->landing_pads;
447   lp->region = region;
448   lp->index = vec_safe_length (cfun->eh->lp_array);
449   region->landing_pads = lp;
450 
451   vec_safe_push (cfun->eh->lp_array, lp);
452 
453   return lp;
454 }
455 
/* Return IFUN's EH region with index I.  */

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}
461 
/* Likewise, but for the current function.  */

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}
467 
/* Return IFUN's landing pad with index I.  */

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}
473 
/* Likewise, but for the current function.  */

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}
479 
480 eh_region
get_eh_region_from_lp_number_fn(struct function * ifun,int i)481 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
482 {
483   if (i < 0)
484     return (*ifun->eh->region_array)[-i];
485   else if (i == 0)
486     return NULL;
487   else
488     {
489       eh_landing_pad lp;
490       lp = (*ifun->eh->lp_array)[i];
491       return lp->region;
492     }
493 }
494 
/* Likewise, but for the current function.  */

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
500 
501 /* Returns true if the current function has exception handling regions.  */
502 
bool
current_function_has_exception_handlers (void)
{
  /* The region tree is non-null iff some EH region has been created
     via gen_eh_region.  */
  return cfun->eh->region_tree != NULL;
}
508 
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD,
   rooting the copy at OUTER and remapping labels via the supplied
   callback.  */
511 
struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;	/* Callback remapping labels.  */
  void *label_map_data;			/* Opaque cookie for LABEL_MAP.  */
  struct pointer_map_t *eh_map;		/* Old -> new region/landing-pad
					   mapping, built as we copy.  */
};
518 
/* Recursive worker for duplicate_eh_regions: copy region OLD_R (and,
   recursively, its children) into the current function as a child of
   OUTER, recording old-to-new pairs for both regions and landing pads
   in DATA->eh_map.  */

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;
  void **slot;

  new_r = gen_eh_region (old_r->type, outer);
  /* Each old region must be inserted into the map exactly once.  */
  slot = pointer_map_insert (data->eh_map, (void *)old_r);
  gcc_assert (*slot == NULL);
  *slot = (void *)new_r;

  /* Copy the kind-specific payload.  */
  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	    = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      slot = pointer_map_insert (data->eh_map, (void *)old_lp);
      gcc_assert (*slot == NULL);
      *slot = (void *)new_lp;

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  /* Recurse over OLD_R's children.  */
  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}
590 
591 /* Duplicate the EH regions from IFUN rooted at COPY_REGION into
592    the current function and root the tree below OUTER_REGION.
593    The special case of COPY_REGION of NULL means all regions.
594    Remap labels using MAP/MAP_DATA callback.  Return a pointer map
595    that allows the caller to remap uses of both EH regions and
596    EH landing pads.  */
597 
struct pointer_map_t *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = pointer_map_create ();

  /* OUTER_LP is a combined landing-pad/region number; resolve it to
     the region the copies will hang from (NULL for the root).  */
  outer_region = get_eh_region_from_lp_number (outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      /* A NULL COPY_REGION means duplicate every top-level region.  */
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  /* Caller owns the map and uses it to remap region/landing-pad
     references; see the comment above.  */
  return data.eh_map;
}
632 
633 /* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */
634 
635 eh_region
eh_region_outermost(struct function * ifun,eh_region region_a,eh_region region_b)636 eh_region_outermost (struct function *ifun, eh_region region_a,
637 		     eh_region region_b)
638 {
639   sbitmap b_outer;
640 
641   gcc_assert (ifun->eh->region_array);
642   gcc_assert (ifun->eh->region_tree);
643 
644   b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
645   bitmap_clear (b_outer);
646 
647   do
648     {
649       bitmap_set_bit (b_outer, region_b->index);
650       region_b = region_b->outer;
651     }
652   while (region_b);
653 
654   do
655     {
656       if (bitmap_bit_p (b_outer, region_a->index))
657 	break;
658       region_a = region_a->outer;
659     }
660   while (region_a);
661 
662   sbitmap_free (b_outer);
663   return region_a;
664 }
665 
/* Equality callback for TYPE_TO_RUNTIME_MAP.  PENTRY is a stored
   TREE_LIST whose TREE_PURPOSE is the original type; PDATA is the
   type being looked up.  */

static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}
674 
/* Hash callback for TYPE_TO_RUNTIME_MAP: hash the original type
   stored in the entry's TREE_PURPOSE.  */

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}
681 
682 void
add_type_for_runtime(tree type)683 add_type_for_runtime (tree type)
684 {
685   tree *slot;
686 
687   /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
688   if (TREE_CODE (type) == NOP_EXPR)
689     return;
690 
691   slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
692 					    TREE_HASH (type), INSERT);
693   if (*slot == NULL)
694     {
695       tree runtime = lang_hooks.eh_runtime_type (type);
696       *slot = tree_cons (type, runtime, NULL_TREE);
697     }
698 }
699 
/* Return the runtime type object previously registered for TYPE by
   add_type_for_runtime.  */

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
715 
716 
717 /* Represent an entry in @TTypes for either catch actions
718    or exception filter actions.  */
struct ttypes_filter {
  tree t;	/* The type -- or, for ehspec entries, the type list.  */
  int filter;	/* Assigned filter value; see add_ttypes_entry and
		   add_ehspec_entry for the two numbering schemes.  */
};
723 
724 /* Helper for ttypes_filter hashing.  */
725 
/* Hash-table traits: stored values are ttypes_filter entries, looked
   up directly by tree node; entries are freed with free().  */

struct ttypes_filter_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
733 
734 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
735    (a tree) for a @TTypes type node we are thinking about adding.  */
736 
inline bool
ttypes_filter_hasher::equal (const value_type *entry, const compare_type *data)
{
  /* Pointer identity on the tree node is sufficient.  */
  return entry->t == data;
}
742 
/* Hash a ttypes_filter entry by its tree node.  */

inline hashval_t
ttypes_filter_hasher::hash (const value_type *entry)
{
  return TREE_HASH (entry->t);
}
748 
/* Hash table mapping each distinct @TTypes type to its filter value.  */
typedef hash_table <ttypes_filter_hasher> ttypes_hash_type;
750 
751 
752 /* Helper for ehspec hashing.  */
753 
/* Hash-table traits: both stored and lookup values are ttypes_filter
   entries whose T is an exception-specification type list, compared
   structurally; entries are freed with free().  */

struct ehspec_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter value_type;
  typedef ttypes_filter compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
761 
762 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
763    exception specification list we are thinking about adding.  */
764 /* ??? Currently we use the type lists in the order given.  Someone
765    should put these in some canonical order.  */
766 
inline bool
ehspec_hasher::equal (const value_type *entry, const compare_type *data)
{
  /* Structural, order-sensitive comparison of the two type lists.  */
  return type_list_equal (entry->t, data->t);
}
772 
773 /* Hash function for exception specification lists.  */
774 
775 inline hashval_t
hash(const value_type * entry)776 ehspec_hasher::hash (const value_type *entry)
777 {
778   hashval_t h = 0;
779   tree list;
780 
781   for (list = entry->t; list ; list = TREE_CHAIN (list))
782     h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
783   return h;
784 }
785 
/* Hash table mapping exception-specification lists to filter values.  */
typedef hash_table <ehspec_hasher> ehspec_hash_type;
787 
788 
789 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
790    to speed up the search.  Return the filter value to be used.  */
791 
792 static int
add_ttypes_entry(ttypes_hash_type ttypes_hash,tree type)793 add_ttypes_entry (ttypes_hash_type ttypes_hash, tree type)
794 {
795   struct ttypes_filter **slot, *n;
796 
797   slot = ttypes_hash.find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
798 					  INSERT);
799 
800   if ((n = *slot) == NULL)
801     {
802       /* Filter value is a 1 based table index.  */
803 
804       n = XNEW (struct ttypes_filter);
805       n->t = type;
806       n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
807       *slot = n;
808 
809       vec_safe_push (cfun->eh->ttype_data, type);
810     }
811 
812   return n->filter;
813 }
814 
815 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
816    to speed up the search.  Return the filter value to be used.  */
817 
static int
add_ehspec_entry (ehspec_hash_type ehspec_hash, ttypes_hash_type ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe with a stack dummy so we only allocate on a miss.  */
  dummy.t = list;
  slot = ehspec_hash.find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      /* ARM EABI unwinding stores raw types; everyone else stores a
	 uleb128-encoded byte buffer.  */
      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      /* Terminate the encoded list.  */
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
865 
866 /* Generate the action filter values to be used for CATCH and
867    ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
868    we use lots of landing pads, and so every type or list can share
869    the same filter value, which saves table space.  */
870 
void
assign_filter_values (void)
{
  int i;
  ttypes_hash_type ttypes;
  ehspec_hash_type ehspec;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ttypes.create (31);
  ehspec.create (31);

  /* Walk all regions by index; slot 0 is the reserved null entry.  */
  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  /* The hash tables were only needed to deduplicate filter values
     during the walk; the assigned values live in the regions.  */
  ttypes.dispose ();
  ehspec.dispose ();
}
944 
/* Emit SEQ into a new basic block just before INSN (which is assumed
   to be the first instruction of some existing BB) and return the
   newly produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb, prev_bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  /* Don't include a trailing barrier in the new block.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  prev_bb = BLOCK_FOR_INSN (insn)->prev_bb;
  bb = create_basic_block (seq, last, prev_bb);
  update_bb_for_insn (bb);
  /* Flag the block for the later break_superblocks pass (see
     finish_eh_generation).  */
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
973 
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
  /* Emit the target's receiver pattern, if any; the two #ifdef'd
     branches chain via dangling "else" into the empty statement.  */
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  /* Copy the exception pointer and filter value out of the EH return
     data registers into the pseudos the region expects them in.  */
  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}
1001 
/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  /* Landing pad index 0 is unused; start at 1.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      /* Emit the landing pad label itself, followed by the
	 target-specific receiver code for the pad's region.  */
      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      /* Place the pad code in a new block that falls through into the
	 post-landing-pad block.  */
      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}
1054 
1055 
/* Map from landing pad index to the call-site index assigned by
   sjlj_assign_call_site_values; read back by sjlj_mark_call_sites and
   released at the end of sjlj_build_landing_pads.  */
static vec<int> sjlj_lp_call_site_index;
1057 
/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index (stored in sjlj_lp_call_site_index).
   Return the number of landing pads processed.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash;
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);
  ar_hash.create (31);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  ar_hash.dispose ();

  return disp_index;
}
1102 
/* Emit code to record the current call-site index before every
   insn that can throw.  The index is stored into the call_site slot
   of the SjLj function context so the unwinder's dispatch can tell
   which landing pad to select.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      /* Nothrow insns need no call-site store.  */
      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      /* Skip the store when the index hasn't changed since the last
	 one emitted in this extended basic block.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
1167 
/* Construct the SjLj_Function_Context: store the personality routine
   and LSDA address into it, set up the jump buffer targeting
   DISPATCH_LABEL (when non-null), and register the context with the
   runtime unwinder at function entry.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  /* Store the LSDA address, or zero when no insn needed an LSDA.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      /* Call setjmp on the jump buffer embedded in the context; a
	 nonzero return means we arrived here via longjmp, so branch
	 to the dispatch code.  */
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  /* Register the context with the runtime unwinder.  */
  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  /* Find the NOTE_INSN_FUNCTION_BEG note, remembering whether a basic
     block note was passed on the way (i.e. whether the insertion point
     falls inside an existing block).  */
  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}
1246 
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  /* Just record the insn; sjlj_emit_function_exit emits the call
     after it.  */
  crtl->eh.sjlj_exit_after = after;
}
1255 
/* Emit the call that unregisters this function's SjLj unwind context,
   placed after the insn recorded by sjlj_emit_function_exit_after.  */

static void
sjlj_emit_function_exit (void)
{
  rtx seq, insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  /* Don't emit directly after a label; step to the following insn.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
1279 
/* Emit the dispatch code that receives control at DISPATCH_LABEL when
   an exception is delivered via longjmp: reload the exception pointer
   and filter value from the SjLj function context and transfer control
   to one of the NUM_DISPATCH active landing pads.  */

static void
sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, seq, fc, before, exc_ptr_reg, filter_reg;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx seq2, label;

	start_sequence ();

	/* All pads share the single dispatch label in SjLj mode.  */
	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    /* Build a case label keyed on this pad's dispatch index
	       for the switch emitted below.  */
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = label_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	/* Copy the loaded exception data into the pseudos the region
	   expects them in.  */
	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	/* Place this pad's code in a new block falling through into
	   its post-landing-pad block.  */
	before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ???  For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  {
		    loop->header = NULL;
		    loop->latch = NULL;
		  }
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      /* Switch on the call-site index stored by sjlj_mark_call_sites.  */
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}
1444 
/* Build the SjLj exception machinery for the current function: assign
   call-site values, allocate the function-context stack slot, and emit
   the call-site stores, register/unregister calls and dispatch table.  */

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL_RTX);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}
1492 
/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      /* Find the EH successor edge of this block, if any.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  /* EH edges leaving a call are abnormal call edges; all
	     others are plain abnormal edges.  */
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
1543 
1544 /* This section handles removing dead code for flow.  */
1545 
1546 void
remove_eh_landing_pad(eh_landing_pad lp)1547 remove_eh_landing_pad (eh_landing_pad lp)
1548 {
1549   eh_landing_pad *pp;
1550 
1551   for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1552     continue;
1553   *pp = lp->next_lp;
1554 
1555   if (lp->post_landing_pad)
1556     EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1557   (*cfun->eh->lp_array)[lp->index] = NULL;
1558 }
1559 
/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  /* Release all of the region's landing pads.  */
  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  /* Re-parent any inner regions to REGION's outer region and splice
     them into the peer chain in REGION's place.  */
  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  /* Close the peer chain over the removed region.  */
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}
1593 
1594 /* Splice a single EH region REGION from the region tree.
1595 
1596    To unlink REGION, we need to find the pointer to it with a relatively
1597    expensive search in REGION's outer region.  If you are going to
1598    remove a number of handlers, using remove_unreachable_eh_regions may
1599    be a better option.  */
1600 
1601 void
remove_eh_handler(eh_region region)1602 remove_eh_handler (eh_region region)
1603 {
1604   eh_region *pp, *pp_start, p, outer;
1605 
1606   outer = region->outer;
1607   if (outer)
1608     pp_start = &outer->inner;
1609   else
1610     pp_start = &cfun->eh->region_tree;
1611   for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1612     continue;
1613 
1614   remove_eh_handler_splicer (pp);
1615 }
1616 
/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      /* Recurse into children first, so unreachable descendants are
	 spliced out before their parents are considered.  */
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
	/* The splicer leaves *PP pointing at the next region to
	   visit (a promoted child or the next peer).  */
	remove_eh_handler_splicer (pp);
      else
	pp = &region->next_peer;
    }
}
1635 
/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splice out regions that
   are not marked.  By removing regions from the leaves, we avoid costly
   searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}
1646 
1647 /* Invokes CALLBACK for every exception handler landing pad label.
1648    Only used by reload hackery; should not be used by new code.  */
1649 
1650 void
for_each_eh_label(void (* callback)(rtx))1651 for_each_eh_label (void (*callback) (rtx))
1652 {
1653   eh_landing_pad lp;
1654   int i;
1655 
1656   for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1657     {
1658       if (lp)
1659 	{
1660 	  rtx lab = lp->landing_pad;
1661 	  if (lab && LABEL_P (lab))
1662 	    (*callback) (lab);
1663 	}
1664     }
1665 }
1666 
1667 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1668    call insn.
1669 
1670    At the gimple level, we use LP_NR
1671        > 0 : The statement transfers to landing pad LP_NR
1672        = 0 : The statement is outside any EH region
1673        < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1674 
1675    At the rtl level, we use LP_NR
1676        > 0 : The insn transfers to landing pad LP_NR
1677        = 0 : The insn cannot throw
1678        < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1679        = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1680        missing note: The insn is outside any EH region.
1681 
1682   ??? This difference probably ought to be avoided.  We could stand
1683   to record nothrow for arbitrary gimple statements, and so avoid
1684   some moderately complex lookups in stmt_could_throw_p.  Perhaps
1685   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
1686   no-nonlocal-goto property should be recorded elsewhere as a bit
1687   on the call_insn directly.  Perhaps we should make more use of
1688   attaching the trees to call_insns (reachable via symbol_ref in
1689   direct call cases) and just pull the data out of the trees.  */
1690 
1691 void
make_reg_eh_region_note(rtx insn,int ecf_flags,int lp_nr)1692 make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
1693 {
1694   rtx value;
1695   if (ecf_flags & ECF_NOTHROW)
1696     value = const0_rtx;
1697   else if (lp_nr != 0)
1698     value = GEN_INT (lp_nr);
1699   else
1700     return;
1701   add_reg_note (insn, REG_EH_REGION, value);
1702 }
1703 
1704 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1705    nor perform a non-local goto.  Replace the region note if it
1706    already exists.  */
1707 
1708 void
make_reg_eh_region_note_nothrow_nononlocal(rtx insn)1709 make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
1710 {
1711   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1712   rtx intmin = GEN_INT (INT_MIN);
1713 
1714   if (note != 0)
1715     XEXP (note, 0) = intmin;
1716   else
1717     add_reg_note (insn, REG_EH_REGION, intmin);
1718 }
1719 
1720 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1721    to the contrary.  */
1722 
1723 bool
insn_could_throw_p(const_rtx insn)1724 insn_could_throw_p (const_rtx insn)
1725 {
1726   if (!flag_exceptions)
1727     return false;
1728   if (CALL_P (insn))
1729     return true;
1730   if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1731     return may_trap_p (PATTERN (insn));
1732   return false;
1733 }
1734 
1735 /* Copy an REG_EH_REGION note to each insn that might throw beginning
1736    at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
1737    to look for a note, or the note itself.  */
1738 
1739 void
copy_reg_eh_region_note_forward(rtx note_or_insn,rtx first,rtx last)1740 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
1741 {
1742   rtx insn, note = note_or_insn;
1743 
1744   if (INSN_P (note_or_insn))
1745     {
1746       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1747       if (note == NULL)
1748 	return;
1749     }
1750   note = XEXP (note, 0);
1751 
1752   for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1753     if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1754         && insn_could_throw_p (insn))
1755       add_reg_note (insn, REG_EH_REGION, note);
1756 }
1757 
1758 /* Likewise, but iterate backward.  */
1759 
1760 void
copy_reg_eh_region_note_backward(rtx note_or_insn,rtx last,rtx first)1761 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
1762 {
1763   rtx insn, note = note_or_insn;
1764 
1765   if (INSN_P (note_or_insn))
1766     {
1767       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1768       if (note == NULL)
1769 	return;
1770     }
1771   note = XEXP (note, 0);
1772 
1773   for (insn = last; insn != first; insn = PREV_INSN (insn))
1774     if (insn_could_throw_p (insn))
1775       add_reg_note (insn, REG_EH_REGION, note);
1776 }
1777 
1778 
/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  On return, *PR and *PLP hold INSN's region
   and landing pad respectively, or NULL when not applicable.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  /* For a delay-slot SEQUENCE, the note lives on the first element.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* No note: the insn is nothrow exactly when it could not throw
	 in the first place.  */
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  /* Values 0 and INT_MIN both mean the insn cannot throw; see the
     commentary before make_reg_eh_region_note.  */
  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  /* A negative value names a MUST_NOT_THROW region; a positive value
     names a landing pad, whose region we report as well.  */
  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}
1826 
1827 /* Return the landing pad to which INSN may go, or NULL if it does not
1828    have a reachable landing pad within this function.  */
1829 
1830 eh_landing_pad
get_eh_landing_pad_from_rtx(const_rtx insn)1831 get_eh_landing_pad_from_rtx (const_rtx insn)
1832 {
1833   eh_landing_pad lp;
1834   eh_region r;
1835 
1836   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1837   return lp;
1838 }
1839 
1840 /* Return the region to which INSN may go, or NULL if it does not
1841    have a reachable region within this function.  */
1842 
1843 eh_region
get_eh_region_from_rtx(const_rtx insn)1844 get_eh_region_from_rtx (const_rtx insn)
1845 {
1846   eh_landing_pad lp;
1847   eh_region r;
1848 
1849   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1850   return r;
1851 }
1852 
1853 /* Return true if INSN throws and is caught by something in this function.  */
1854 
1855 bool
can_throw_internal(const_rtx insn)1856 can_throw_internal (const_rtx insn)
1857 {
1858   return get_eh_landing_pad_from_rtx (insn) != NULL;
1859 }
1860 
/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  /* For a delay-slot SEQUENCE, the whole group can throw externally
     if any element can.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx seq = PATTERN (insn);
      int i, n = XVECLEN (seq, 0);

      for (i = 0; i < n; i++)
	if (can_throw_external (XVECEXP (seq, 0, i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}
1905 
1906 /* Return true if INSN cannot throw at all.  */
1907 
1908 bool
insn_nothrow_p(const_rtx insn)1909 insn_nothrow_p (const_rtx insn)
1910 {
1911   eh_landing_pad lp;
1912   eh_region r;
1913 
1914   if (! INSN_P (insn))
1915     return true;
1916 
1917   if (NONJUMP_INSN_P (insn)
1918       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1919     {
1920       rtx seq = PATTERN (insn);
1921       int i, n = XVECLEN (seq, 0);
1922 
1923       for (i = 0; i < n; i++)
1924 	if (!insn_nothrow_p (XVECEXP (seq, 0, i)))
1925 	  return false;
1926 
1927       return true;
1928     }
1929 
1930   return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1931 }
1932 
1933 /* Return true if INSN can perform a non-local goto.  */
1934 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */
1935 
1936 bool
can_nonlocal_goto(const_rtx insn)1937 can_nonlocal_goto (const_rtx insn)
1938 {
1939   if (nonlocal_goto_handler_labels && CALL_P (insn))
1940     {
1941       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1942       if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1943 	return true;
1944     }
1945   return false;
1946 }
1947 
1948 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */
1949 
1950 static unsigned int
set_nothrow_function_flags(void)1951 set_nothrow_function_flags (void)
1952 {
1953   rtx insn;
1954 
1955   crtl->nothrow = 1;
1956 
1957   /* Assume crtl->all_throwers_are_sibcalls until we encounter
1958      something that can throw an exception.  We specifically exempt
1959      CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1960      and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
1961      is optimistic.  */
1962 
1963   crtl->all_throwers_are_sibcalls = 1;
1964 
1965   /* If we don't know that this implementation of the function will
1966      actually be used, then we must not set TREE_NOTHROW, since
1967      callers must not assume that this function does not throw.  */
1968   if (TREE_NOTHROW (current_function_decl))
1969     return 0;
1970 
1971   if (! flag_exceptions)
1972     return 0;
1973 
1974   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1975     if (can_throw_external (insn))
1976       {
1977         crtl->nothrow = 0;
1978 
1979 	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1980 	  {
1981 	    crtl->all_throwers_are_sibcalls = 0;
1982 	    return 0;
1983 	  }
1984       }
1985 
1986   if (crtl->nothrow
1987       && (cgraph_function_body_availability (cgraph_get_node
1988 					     (current_function_decl))
1989           >= AVAIL_AVAILABLE))
1990     {
1991       struct cgraph_node *node = cgraph_get_node (current_function_decl);
1992       struct cgraph_edge *e;
1993       for (e = node->callers; e; e = e->next_caller)
1994         e->can_throw_external = false;
1995       cgraph_set_nothrow_flag (node, true);
1996 
1997       if (dump_file)
1998 	fprintf (dump_file, "Marking function nothrow: %s\n\n",
1999 		 current_function_name ());
2000     }
2001   return 0;
2002 }
2003 
namespace {

/* Pass metadata for the nothrow-discovery RTL pass.  has_gate is false,
   so the pass always runs; it has no finish TODO flags.  */
const pass_data pass_data_set_nothrow_function_flags =
{
  RTL_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin opt_pass wrapper that dispatches to set_nothrow_function_flags.  */
class pass_set_nothrow_function_flags : public rtl_opt_pass
{
public:
  pass_set_nothrow_function_flags (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return set_nothrow_function_flags (); }

}; // class pass_set_nothrow_function_flags

} // anon namespace
2034 
2035 rtl_opt_pass *
make_pass_set_nothrow_function_flags(gcc::context * ctxt)2036 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2037 {
2038   return new pass_set_nothrow_function_flags (ctxt);
2039 }
2040 
2041 
2042 /* Various hooks for unwind library.  */
2043 
2044 /* Expand the EH support builtin functions:
2045    __builtin_eh_pointer and __builtin_eh_filter.  */
2046 
2047 static eh_region
expand_builtin_eh_common(tree region_nr_t)2048 expand_builtin_eh_common (tree region_nr_t)
2049 {
2050   HOST_WIDE_INT region_nr;
2051   eh_region region;
2052 
2053   gcc_assert (tree_fits_shwi_p (region_nr_t));
2054   region_nr = tree_to_shwi (region_nr_t);
2055 
2056   region = (*cfun->eh->region_array)[region_nr];
2057 
2058   /* ??? We shouldn't have been able to delete a eh region without
2059      deleting all the code that depended on it.  */
2060   gcc_assert (region != NULL);
2061 
2062   return region;
2063 }
2064 
2065 /* Expand to the exc_ptr value from the given eh region.  */
2066 
2067 rtx
expand_builtin_eh_pointer(tree exp)2068 expand_builtin_eh_pointer (tree exp)
2069 {
2070   eh_region region
2071     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2072   if (region->exc_ptr_reg == NULL)
2073     region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2074   return region->exc_ptr_reg;
2075 }
2076 
2077 /* Expand to the filter value from the given eh region.  */
2078 
2079 rtx
expand_builtin_eh_filter(tree exp)2080 expand_builtin_eh_filter (tree exp)
2081 {
2082   eh_region region
2083     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2084   if (region->filter_reg == NULL)
2085     region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2086   return region->filter_reg;
2087 }
2088 
2089 /* Copy the exc_ptr and filter values from one landing pad's registers
2090    to another.  This is used to inline the resx statement.  */
2091 
2092 rtx
expand_builtin_eh_copy_values(tree exp)2093 expand_builtin_eh_copy_values (tree exp)
2094 {
2095   eh_region dst
2096     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2097   eh_region src
2098     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2099   enum machine_mode fmode = targetm.eh_return_filter_mode ();
2100 
2101   if (dst->exc_ptr_reg == NULL)
2102     dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2103   if (src->exc_ptr_reg == NULL)
2104     src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2105 
2106   if (dst->filter_reg == NULL)
2107     dst->filter_reg = gen_reg_rtx (fmode);
2108   if (src->filter_reg == NULL)
2109     src->filter_reg = gen_reg_rtx (fmode);
2110 
2111   emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2112   emit_move_insn (dst->filter_reg, src->filter_reg);
2113 
2114   return const0_rtx;
2115 }
2116 
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

  /* Let the target perform any additional frame-walking setup.  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2131 
/* Map a non-negative number to an eh return data register number; expands
   to -1 if no return data register is associated with the input number.
   At least the inputs 0 and 1 must be mapped; the target may provide more.  */

rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  /* The argument must be a compile-time constant; otherwise diagnose
     and expand to the -1 "no register" value.  */
  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_to_uhwi (which);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used by the
     unwinder/debug info.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
2161 
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Normalize the value to Pmode before adjusting it.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2192 
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

  /* Undo the adjustment applied by expand_builtin_extract_return_addr.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2211 
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  HANDLER_TREE is the address to transfer to;
   STACKADJ_TREE is the stack adjustment, used only when the target
   defines EH_RETURN_STACKADJ_RTX.  Records both values in crtl->eh and
   jumps to the (lazily created) eh-return label.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Evaluate the stack adjustment into (or copy it to) the saved
     ehr_stackadj pseudo, creating it on first use.  */
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  /* Likewise for the handler address.  */
  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  /* All __builtin_eh_return calls share one label; expand_eh_return
     emits the actual return sequence there.  */
  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
2243 
/* Expand __builtin_eh_return.  This exit path from the function loads up
   the eh return data registers, adjusts the stack, and branches to a
   given PC other than the normal return address.  */

void
expand_eh_return (void)
{
  rtx around_label;

  /* If no __builtin_eh_return was expanded, there is nothing to do.  */
  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

  /* On the normal path the stack adjustment is zero.  */
#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Skip the eh-return sequence on the ordinary fall-through path.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

  /* Prefer the target's eh_return pattern; otherwise fall back to a
     plain move into EH_RETURN_HANDLER_RTX, or report lack of support.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
2287 
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  /* Widen to the mode the unwinder uses for words.  */
  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
2307 
/* Enter a (FILTER, NEXT) action pair into the hash table AR_HASH,
   appending its LEB128 encoding to crtl->eh.action_record_data if an
   identical record is not already present.  FILTER is the type filter
   value; NEXT is the 1-based offset of the next record in the chain,
   or zero for none.  Returns the record's 1-based offset within the
   action table.  */

static int
add_action_record (action_hash_type ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  /* Look for an existing record with the same filter/next pair.  */
  tmp.filter = filter;
  tmp.next = next;
  slot = ar_hash.find_slot (&tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      /* Offsets are 1-based so that zero can mean "no record".  */
      new_ar->offset = crtl->eh.action_record_data->length () + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= crtl->eh.action_record_data->length () + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
2338 
/* Build the chain of action records describing what happens when an
   exception propagates outward from REGION.  Returns the 1-based
   offset of the first action record, or one of the magic values:
   -1 no action and no landing pad required,
   -2 must-not-throw region (no call-site entry, but an lsda is needed),
   -3 is used only internally (outer search not yet performed).  */

static int
collect_one_action_chain (action_hash_type ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
	eh_region r;
	/* A cleanup adds a zero filter to the beginning of the chain, but
	   there are special cases to look out for.  If there are *only*
	   cleanups along a path, then it compresses to a zero action.
	   Further, if there are multiple cleanups along a path, we only
	   need to represent one of them, as that is enough to trigger
	   entry to the landing pad at runtime.  */
	next = collect_one_action_chain (ar_hash, region->outer);
	if (next <= 0)
	  return 0;
	for (r = region->outer; r ; r = r->outer)
	  if (r->type == ERT_CLEANUP)
	    return next;
	return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
	eh_catch c;

	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
	  {
	    if (c->type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
		/* A catch-all terminates the chain: next link is zero.  */
		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record for
		   each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we had wanted
		       to encode these states in the call-site record directly.
		       Add a cleanup action to the chain to catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}
2446 
2447 static int
add_call_site(rtx landing_pad,int action,int section)2448 add_call_site (rtx landing_pad, int action, int section)
2449 {
2450   call_site_record record;
2451 
2452   record = ggc_alloc_call_site_record_d ();
2453   record->landing_pad = landing_pad;
2454   record->action = action;
2455 
2456   vec_safe_push (crtl->eh.call_site_record_v[section], record);
2457 
2458   return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2459 }
2460 
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.

   Walks the insn stream once, tracking the current action chain and
   landing pad; whenever either changes, the previous EH region note is
   closed and a new one opened.  NOTE_INSN_SWITCH_TEXT_SECTIONS (from
   hot/cold partitioning) forces a region break and starts a fresh
   call-site table for the new section.  */

static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  action_hash_type ar_hash;
  int last_action = -3;		   /* -3: no action seen yet.  */
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;
  int cur_sec = 0;		   /* Current text section index.  */
  rtx section_switch_note = NULL_RTX;
  rtx first_no_action_insn_before_switch = NULL_RTX;
  rtx last_no_action_insn_before_switch = NULL_RTX;
  int saved_call_site_base = call_site_base;

  vec_alloc (crtl->eh.action_record_data, 64);

  ar_hash.create (31);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	eh_landing_pad lp;
	eh_region region;
	bool nothrow;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* For a SEQUENCE, the EH information lives on the first member.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
	if (nothrow)
	  continue;
	if (region)
	  this_action = collect_one_action_chain (ar_hash, region);
	else
	  this_action = -1;

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	if (this_action >= 0)
	  this_landing_pad = lp->landing_pad;
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If there is a queued no-action region in the other section
	       with hot/cold partitioning, emit it now.  */
	    if (first_no_action_insn_before_switch)
	      {
		gcc_assert (this_action != -1
			    && last_action == (first_no_action_insn
					       ? -1 : -3));
		call_site = add_call_site (NULL_RTX, 0, 0);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					 first_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		gcc_assert (last_action != -3
			    || (last_action_insn
				== last_no_action_insn_before_switch));
		first_no_action_insn_before_switch = NULL_RTX;
		last_no_action_insn_before_switch = NULL_RTX;
		call_site_base++;
	      }
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action,
					   cur_sec);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }
    else if (NOTE_P (iter)
	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
	gcc_assert (section_switch_note == NULL_RTX);
	gcc_assert (flag_reorder_blocks_and_partition);
	section_switch_note = iter;
	/* A pending delayed no-action region cannot straddle the section
	   boundary; queue it for emission when needed in the new section.  */
	if (first_no_action_insn)
	  {
	    first_no_action_insn_before_switch = first_no_action_insn;
	    last_no_action_insn_before_switch = last_action_insn;
	    first_no_action_insn = NULL_RTX;
	    gcc_assert (last_action == -1);
	    last_action = -3;
	  }
	/* Force closing of current EH region before section switch and
	   opening a new one afterwards.  */
	else if (last_action != -3)
	  last_landing_pad = pc_rtx;
	if (crtl->eh.call_site_record_v[cur_sec])
	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
	cur_sec++;
	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
      }

  /* Close the final region if one is still open.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  ar_hash.dispose ();
  return 0;
}
2623 
2624 static bool
gate_convert_to_eh_region_ranges(void)2625 gate_convert_to_eh_region_ranges (void)
2626 {
2627   /* Nothing to do for SJLJ exceptions or if no regions created.  */
2628   if (cfun->eh->region_tree == NULL)
2629     return false;
2630   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2631     return false;
2632   return true;
2633 }
2634 
namespace {

/* Pass metadata for the eh_ranges RTL pass.  It is gated (see
   gate_convert_to_eh_region_ranges) and has an execute hook.  */
const pass_data pass_data_convert_to_eh_region_ranges =
{
  RTL_PASS, /* type */
  "eh_ranges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin opt_pass wrapper dispatching to the gate/execute functions above.  */
class pass_convert_to_eh_region_ranges : public rtl_opt_pass
{
public:
  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_convert_to_eh_region_ranges (); }
  unsigned int execute () { return convert_to_eh_region_ranges (); }

}; // class pass_convert_to_eh_region_ranges

} // anon namespace
2666 
2667 rtl_opt_pass *
make_pass_convert_to_eh_region_ranges(gcc::context * ctxt)2668 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2669 {
2670   return new pass_convert_to_eh_region_ranges (ctxt);
2671 }
2672 
2673 static void
push_uleb128(vec<uchar,va_gc> ** data_area,unsigned int value)2674 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2675 {
2676   do
2677     {
2678       unsigned char byte = value & 0x7f;
2679       value >>= 7;
2680       if (value)
2681 	byte |= 0x80;
2682       vec_safe_push (*data_area, byte);
2683     }
2684   while (value);
2685 }
2686 
2687 static void
push_sleb128(vec<uchar,va_gc> ** data_area,int value)2688 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2689 {
2690   unsigned char byte;
2691   int more;
2692 
2693   do
2694     {
2695       byte = value & 0x7f;
2696       value >>= 7;
2697       more = ! ((value == 0 && (byte & 0x40) == 0)
2698 		|| (value == -1 && (byte & 0x40) != 0));
2699       if (more)
2700 	byte |= 0x80;
2701       vec_safe_push (*data_area, byte);
2702     }
2703   while (more);
2704 }
2705 
2706 
2707 #ifndef HAVE_AS_LEB128
2708 static int
dw2_size_of_call_site_table(int section)2709 dw2_size_of_call_site_table (int section)
2710 {
2711   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2712   int size = n * (4 + 4 + 4);
2713   int i;
2714 
2715   for (i = 0; i < n; ++i)
2716     {
2717       struct call_site_record_d *cs =
2718 	(*crtl->eh.call_site_record_v[section])[i];
2719       size += size_of_uleb128 (cs->action);
2720     }
2721 
2722   return size;
2723 }
2724 
2725 static int
sjlj_size_of_call_site_table(void)2726 sjlj_size_of_call_site_table (void)
2727 {
2728   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2729   int size = 0;
2730   int i;
2731 
2732   for (i = 0; i < n; ++i)
2733     {
2734       struct call_site_record_d *cs =
2735 	(*crtl->eh.call_site_record_v[0])[i];
2736       size += size_of_uleb128 (INTVAL (cs->landing_pad));
2737       size += size_of_uleb128 (cs->action);
2738     }
2739 
2740   return size;
2741 }
2742 #endif
2743 
/* Emit the DWARF2 call-site table for SECTION to the assembler output.
   CS_FORMAT selects between DW_EH_PE_uleb128 encodings and fixed
   4-byte deltas.  Region start/end offsets are emitted relative to the
   section's begin label; updates call_site_base by the number of
   records emitted.  */

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int i;
  const char *begin;

  /* Choose the label that offsets in this section are relative to.  */
  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
2804 
2805 static void
sjlj_output_call_site_table(void)2806 sjlj_output_call_site_table (void)
2807 {
2808   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2809   int i;
2810 
2811   for (i = 0; i < n; ++i)
2812     {
2813       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2814 
2815       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2816 				   "region %d landing pad", i);
2817       dw2_asm_output_data_uleb128 (cs->action, "action");
2818     }
2819 
2820   call_site_base += n;
2821 }
2822 
/* Switch to the section that should be used for exception tables.
   The chosen section is cached in exception_section unless it depends
   on the function name (per-function sections for -ffunction-sections
   or COMDAT functions).  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      /* The table must be writable if PIC code would need
		 runtime relocations for absolute/aligned pointers.  */
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  /* With a linker that can GC EH sections, use a per-function
	     section name; such sections are not cached.  */
	  if (flag_function_sections
	      || (DECL_ONE_ONLY (current_function_decl) && HAVE_COMDAT_GROUP))
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      /* The EH table must match the code section, so only mark
		 it linkonce if we have COMDAT groups to tie them together.  */
	      if (DECL_ONE_ONLY (current_function_decl) && HAVE_COMDAT_GROUP)
		flags |= SECTION_LINKONCE;
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, current_function_decl);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
2877 
2878 
2879 /* Output a reference from an exception table to the type_info object TYPE.
2880    TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2881    the value.  */
2882 
static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    /* No type: emit a zero entry.  */
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    is_public = TREE_PUBLIC (type);
	}
      else
	/* Anything else must already be a constant.  */
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    /* Plain (aligned) absolute values can be emitted directly.  */
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    /* Otherwise use the requested DWARF EH encoding; IS_PUBLIC lets
       the emitter decide whether an indirection is needed.  */
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
2927 
/* Emit the LSDA (exception table body) for the current function.
   SECTION selects which of the two recorded call-site tables to emit
   (0 or 1); for SECTION == 1 every internal label name gets a "C"
   suffix so the two tables' labels do not clash.  */

static void
output_one_function_exception_table (int section)
{
  int tt_format, cs_format, lp_format, i;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* We have type data iff there are @TType entries or an exception
     specification table; the latter's representation depends on
     whether the ARM EABI unwinder is in use.  */
  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
		  || (targetm.arm_eabi_unwinder
		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
		      : vec_safe_length (cfun->eh->ehspec_data.other)));

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label,
				   section ? "LLSDATTC" : "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  /* Without assembler leb128 support we must know the call-site table
     length up front to compute the @TType displacement below.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table (section);
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
				   section ? "LLSDATTDC" : "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      /* The displacement is itself a uleb128 whose byte size depends
	 on its value, and the alignment padding depends on the
	 displacement; iterate until the value converges.  */
      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + vec_safe_length (crtl->eh.action_record_data)
		    + (vec_safe_length (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  /* With leb128 support, the table length is emitted as a delta
     between labels placed just before and just after the table.  */
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
			       section ? "LLSDACSBC" : "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
			       section ? "LLSDACSEC" : "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  {
    uchar uc;
    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
  }

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* Emit the @TType entries in reverse order of the vector.  */
  i = vec_safe_length (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = (*cfun->eh->ttype_data)[i];
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  /* The label used above for the "@TType base offset" delta.  */
  if (have_tt_data)
      ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  if (targetm.arm_eabi_unwinder)
    {
      tree type;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
	output_ttype (type, tt_format, tt_format_size);
    }
  else
    {
      uchar uc;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
	dw2_asm_output_data (1, uc,
			     i ? NULL : "Exception specification table");
    }
}
3103 
3104 void
output_function_exception_table(const char * fnname)3105 output_function_exception_table (const char *fnname)
3106 {
3107   rtx personality = get_personality_function (current_function_decl);
3108 
3109   /* Not all functions need anything.  */
3110   if (! crtl->uses_eh_lsda)
3111     return;
3112 
3113   if (personality)
3114     {
3115       assemble_external_libcall (personality);
3116 
3117       if (targetm.asm_out.emit_except_personality)
3118 	targetm.asm_out.emit_except_personality (personality);
3119     }
3120 
3121   switch_to_exception_section (fnname);
3122 
3123   /* If the target wants a label to begin the table, emit it here.  */
3124   targetm.asm_out.emit_except_table_label (asm_out_file);
3125 
3126   output_one_function_exception_table (0);
3127   if (crtl->eh.call_site_record_v[1])
3128     output_one_function_exception_table (1);
3129 
3130   switch_to_section (current_function_section ());
3131 }
3132 
/* Record TABLE as FUN's throw-statement table.  Per the overview at
   the top of this file, statements that had been within TRY nodes and
   can throw are recorded in CFUN->EH->THROW_STMT_TABLE.  */

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}
3138 
/* Return FUN's throw-statement table (may be NULL if it has not been
   set; see set_eh_throw_stmt_table above).  */

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
3144 
3145 /* Determine if the function needs an EH personality function.  */
3146 
3147 enum eh_personality_kind
function_needs_eh_personality(struct function * fn)3148 function_needs_eh_personality (struct function *fn)
3149 {
3150   enum eh_personality_kind kind = eh_personality_none;
3151   eh_region i;
3152 
3153   FOR_ALL_EH_REGION_FN (i, fn)
3154     {
3155       switch (i->type)
3156 	{
3157 	case ERT_CLEANUP:
3158 	  /* Can do with any personality including the generic C one.  */
3159 	  kind = eh_personality_any;
3160 	  break;
3161 
3162 	case ERT_TRY:
3163 	case ERT_ALLOWED_EXCEPTIONS:
3164 	  /* Always needs a EH personality function.  The generic C
3165 	     personality doesn't handle these even for empty type lists.  */
3166 	  return eh_personality_lang;
3167 
3168 	case ERT_MUST_NOT_THROW:
3169 	  /* Always needs a EH personality function.  The language may specify
3170 	     what abort routine that must be used, e.g. std::terminate.  */
3171 	  return eh_personality_lang;
3172 	}
3173     }
3174 
3175   return kind;
3176 }
3177 
3178 /* Dump EH information to OUT.  */
3179 
void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  /* Printable names, indexed by region type.  */
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  /* Depth-first walk of the region tree; one output line per region,
     indented two columns per nesting level.  */
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->index, type_name[(int) i->type]);

      if (i->landing_pads)
	{
	  eh_landing_pad lp;

	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      /* In GIMPLE, identify each landing pad by its
		 post-landing-pad label expression.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad, 0);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      /* In RTL, print the insn UIDs of the landing-pad and
		 post-landing-pad labels, tagging labels that have
		 become notes with "(del)".  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      /* Region-type-specific details.  */
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label, 0);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list, 0);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
3294 
/* Dump the EH tree for FN on stderr.  Convenience wrapper around
   dump_eh_tree, intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
3302 
3303 /* Verify invariants on EH datastructures.  */
3304 
DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  /* Pass 1: every non-null region_array entry must record its own
     index.  Count the valid entries for comparison with the tree
     walk below.  (Index 0 is skipped in both array scans.)  */
  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("region_array is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Likewise for lp_array.  */
  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("lp_array is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Pass 2: depth-first walk of the region tree, checking that each
     region's array slot points back to it, that outer links mirror
     the walk, and that nesting depth never goes negative.  Landing
     pads are checked the same way against lp_array and their region
     back-pointer.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
	{
	  error ("region_array is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if ((*fun->eh->lp_array)[lp->index] != lp)
	    {
	      error ("lp_array is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      /* Advance: inner first, then peers, then climb back up.  */
      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  /* The walk must come back to depth zero, and must have visited
     exactly the entries counted in pass 1.  */
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}
3421 
3422 #include "gt-except.h"
3423