/* Implements exception handling.
   Copyright (C) 1989-2019 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.
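
   For illustration only (a sketch, not the exact dump syntax of any
   particular GCC version): a C++ fragment such as

	try { f (); } catch (int) { g (); }

   arrives from the front end as a TRY_CATCH_EXPR whose handler
   contains a CATCH_EXPR for int, and gimplification lowers it to a
   GIMPLE_TRY whose cleanup sequence holds a GIMPLE_CATCH wrapping
   the call to g ().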

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or a series of conditional branches that use the argument
   provided by the runtime (__builtin_eh_filter) and the set of integers
   we computed in assign_filter_values.
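
   For example (a sketch with made-up region and filter numbers), a
   GIMPLE_EH_DISPATCH for a try statement with handlers for int and
   for all other types might become roughly

	tmp = __builtin_eh_filter (region_nr);
	if (tmp == 1) goto catch_int; else goto catch_all;

   where 1 is the integer that assign_filter_values chose for the
   runtime type of int.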

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).
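
   For example (again a sketch rather than exact output), a RESX whose
   destination is an outer region within the same function becomes
   roughly

	__builtin_eh_copy_values (dst_region_nr, src_region_nr);
	goto outer_landing_pad;

   while a RESX with no handler in the current function becomes a call
   to __builtin_unwind_resume passing the saved exception pointer.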

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl-to-eh_region mapping that corresponds to the
   gimple-to-eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.
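
   Schematically, the insn stream afterward contains ranges such as

	NOTE_INSN_EH_REGION_BEG N
	  ... insns whose REG_EH_REGION notes all named region N ...
	NOTE_INSN_EH_REGION_END N

   and any throwing insn outside every such range has no action
   associated with it.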

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "calls.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "tree-pretty-print.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-hash-traits.h"

static GTY(()) int call_site_base;

static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;

static GTY(()) tree setjmp_fn;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;


struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */
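
/* For example (illustrative indices only): a landing pad whose region
   catches int (filter value 1) nested inside an outer cleanup region
   yields two chained records, a cleanup record { filter 0, next 0 }
   and a catch record { filter 1, next -> the cleanup record }; the
   call-site entry then refers to the catch record by its 1-based
   index.  */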

struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
			     const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;

static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);


void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif
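
      /* A worked example of the computation above: with 64-bit words
	 and 32-bit pointers, 5 * BITS_PER_WORD / POINTER_SIZE is
	 5 * 64 / 32 = 10, so the array gets ten 32-bit slots, which
	 is exactly the five words of storage requested.  */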

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);

#ifdef DONT_USE_BUILTIN_SETJMP
      tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
				      NULL);
      setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
			      get_identifier ("setjmp"), tmp);
      TREE_PUBLIC (setjmp_fn) = 1;
      DECL_EXTERNAL (setjmp_fn) = 1;
      DECL_ASSEMBLER_NAME (setjmp_fn);
#endif
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zeroth entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Ensure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}

/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	    = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  auto_sbitmap b_outer (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }
  while (region_a);

  return region_a;
}

void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					  INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0-terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or a true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}

/* Emit SEQ into a basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into a newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

static void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      bb->count = bb->next_bb->count;
      make_single_succ_edge (bb, bb->next_bb, e_flags);
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}


static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label,
			       profile_probability::unlikely ());
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

#ifdef DONT_USE_BUILTIN_SETJMP
  if (dispatch_label)
    {
      /* The sequence contains a branch in the middle so we need to force
	 the creation of a new basic block by means of BB_SUPERBLOCK.  */
      if (fn_begin_outside_block)
	{
	  basic_block bb
	    = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  if (JUMP_P (BB_END (bb)))
	    emit_insn_before (seq, BB_END (bb));
	  else
	    emit_insn_after (seq, BB_END (bb));
	}
      else
	emit_insn_after (seq, fn_begin);

      single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
      return;
    }
#endif

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
  scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ???  For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at the beginning of the function, simply associate the
	 block with the outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  break_superblocks ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();
}

/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}
1624 
1625 /* Worker for remove_unreachable_eh_regions.
1626    PP is a pointer to the region to start a region tree depth-first
1627    search from.  R_REACHABLE is the set of regions that have to be
1628    preserved.  */
1629 
1630 static void
remove_unreachable_eh_regions_worker(eh_region * pp,sbitmap r_reachable)1631 remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1632 {
1633   while (*pp)
1634     {
1635       eh_region region = *pp;
1636       remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1637       if (!bitmap_bit_p (r_reachable, region->index))
1638 	remove_eh_handler_splicer (pp);
1639       else
1640 	pp = &region->next_peer;
1641     }
1642 }
1643 
1644 /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1645    Do this by traversing the EH tree top-down and splicing out regions
1646    that are not marked.  By removing regions from the leaves, we avoid costly
1647    searches in the region tree.  */
1648 
1649 void
1650 remove_unreachable_eh_regions (sbitmap r_reachable)
1651 {
1652   remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
1653 }
1654 
1655 /* Invokes CALLBACK for every exception handler landing pad label.
1656    Only used by reload hackery; should not be used by new code.  */
1657 
1658 void
1659 for_each_eh_label (void (*callback) (rtx))
1660 {
1661   eh_landing_pad lp;
1662   int i;
1663 
1664   for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1665     {
1666       if (lp)
1667 	{
1668 	  rtx_code_label *lab = lp->landing_pad;
1669 	  if (lab && LABEL_P (lab))
1670 	    (*callback) (lab);
1671 	}
1672     }
1673 }
1674 
1675 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1676    call insn.
1677 
1678    At the gimple level, we use LP_NR
1679        > 0 : The statement transfers to landing pad LP_NR
1680        = 0 : The statement is outside any EH region
1681        < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1682 
1683    At the rtl level, we use LP_NR
1684        > 0 : The insn transfers to landing pad LP_NR
1685        = 0 : The insn cannot throw
1686        < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1687        = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1688        missing note: The insn is outside any EH region.
1689 
1690   ??? This difference probably ought to be avoided.  We could stand
1691   to record nothrow for arbitrary gimple statements, and so avoid
1692   some moderately complex lookups in stmt_could_throw_p.  Perhaps
1693   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
1694   no-nonlocal-goto property should be recorded elsewhere as a bit
1695   on the call_insn directly.  Perhaps we should make more use of
1696   attaching the trees to call_insns (reachable via symbol_ref in
1697   direct call cases) and just pull the data out of the trees.  */
1698 
1699 void
1700 make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
1701 {
1702   rtx value;
1703   if (ecf_flags & ECF_NOTHROW)
1704     value = const0_rtx;
1705   else if (lp_nr != 0)
1706     value = GEN_INT (lp_nr);
1707   else
1708     return;
1709   add_reg_note (insn, REG_EH_REGION, value);
1710 }
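
/* Illustrative examples, not from the original source: with the encoding
   documented above, hypothetical calls would behave as follows.

     make_reg_eh_region_note (insn, 0, 3);            // note value 3: may reach landing pad 3
     make_reg_eh_region_note (insn, 0, -2);           // note value -2: within MUST_NOT_THROW region 2
     make_reg_eh_region_note (insn, ECF_NOTHROW, 0);  // note value 0: cannot throw
     make_reg_eh_region_note (insn, 0, 0);            // no note: outside any EH region
*/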
1711 
1712 /* Create a REG_EH_REGION note for a CALL_INSN that can neither throw
1713    nor perform a non-local goto.  Replace the region note if it
1714    already exists.  */
1715 
1716 void
1717 make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
1718 {
1719   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1720   rtx intmin = GEN_INT (INT_MIN);
1721 
1722   if (note != 0)
1723     XEXP (note, 0) = intmin;
1724   else
1725     add_reg_note (insn, REG_EH_REGION, intmin);
1726 }
1727 
1728 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1729    to the contrary.  */
1730 
1731 bool
1732 insn_could_throw_p (const_rtx insn)
1733 {
1734   if (!flag_exceptions)
1735     return false;
1736   if (CALL_P (insn))
1737     return true;
1738   if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1739     return may_trap_p (PATTERN (insn));
1740   return false;
1741 }
1742 
1743 /* Copy a REG_EH_REGION note to each insn that might throw beginning
1744    at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
1745    to look for a note, or the note itself.  */
1746 
1747 void
1748 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1749 {
1750   rtx_insn *insn;
1751   rtx note = note_or_insn;
1752 
1753   if (INSN_P (note_or_insn))
1754     {
1755       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1756       if (note == NULL)
1757 	return;
1758     }
1759   else if (is_a <rtx_insn *> (note_or_insn))
1760     return;
1761   note = XEXP (note, 0);
1762 
1763   for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1764     if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1765         && insn_could_throw_p (insn))
1766       add_reg_note (insn, REG_EH_REGION, note);
1767 }
1768 
1769 /* Likewise, but iterate backward.  */
1770 
1771 void
1772 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1773 {
1774   rtx_insn *insn;
1775   rtx note = note_or_insn;
1776 
1777   if (INSN_P (note_or_insn))
1778     {
1779       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1780       if (note == NULL)
1781 	return;
1782     }
1783   else if (is_a <rtx_insn *> (note_or_insn))
1784     return;
1785   note = XEXP (note, 0);
1786 
1787   for (insn = last; insn != first; insn = PREV_INSN (insn))
1788     if (insn_could_throw_p (insn))
1789       add_reg_note (insn, REG_EH_REGION, note);
1790 }
1791 
1792 
1793 /* Extract all EH information from INSN.  Return true if the insn
1794    was marked NOTHROW.  */
1795 
1796 static bool
1797 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1798 			       eh_landing_pad *plp)
1799 {
1800   eh_landing_pad lp = NULL;
1801   eh_region r = NULL;
1802   bool ret = false;
1803   rtx note;
1804   int lp_nr;
1805 
1806   if (! INSN_P (insn))
1807     goto egress;
1808 
1809   if (NONJUMP_INSN_P (insn)
1810       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1811     insn = XVECEXP (PATTERN (insn), 0, 0);
1812 
1813   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1814   if (!note)
1815     {
1816       ret = !insn_could_throw_p (insn);
1817       goto egress;
1818     }
1819 
1820   lp_nr = INTVAL (XEXP (note, 0));
1821   if (lp_nr == 0 || lp_nr == INT_MIN)
1822     {
1823       ret = true;
1824       goto egress;
1825     }
1826 
1827   if (lp_nr < 0)
1828     r = (*cfun->eh->region_array)[-lp_nr];
1829   else
1830     {
1831       lp = (*cfun->eh->lp_array)[lp_nr];
1832       r = lp->region;
1833     }
1834 
1835  egress:
1836   *plp = lp;
1837   *pr = r;
1838   return ret;
1839 }
1840 
1841 /* Return the landing pad to which INSN may go, or NULL if it does not
1842    have a reachable landing pad within this function.  */
1843 
1844 eh_landing_pad
1845 get_eh_landing_pad_from_rtx (const_rtx insn)
1846 {
1847   eh_landing_pad lp;
1848   eh_region r;
1849 
1850   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1851   return lp;
1852 }
1853 
1854 /* Return the region to which INSN may go, or NULL if it does not
1855    have a reachable region within this function.  */
1856 
1857 eh_region
1858 get_eh_region_from_rtx (const_rtx insn)
1859 {
1860   eh_landing_pad lp;
1861   eh_region r;
1862 
1863   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1864   return r;
1865 }
1866 
1867 /* Return true if INSN throws and is caught by something in this function.  */
1868 
1869 bool
1870 can_throw_internal (const_rtx insn)
1871 {
1872   return get_eh_landing_pad_from_rtx (insn) != NULL;
1873 }
1874 
1875 /* Return true if INSN throws and escapes from the current function.  */
1876 
1877 bool
1878 can_throw_external (const_rtx insn)
1879 {
1880   eh_landing_pad lp;
1881   eh_region r;
1882   bool nothrow;
1883 
1884   if (! INSN_P (insn))
1885     return false;
1886 
1887   if (NONJUMP_INSN_P (insn)
1888       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1889     {
1890       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1891       int i, n = seq->len ();
1892 
1893       for (i = 0; i < n; i++)
1894 	if (can_throw_external (seq->element (i)))
1895 	  return true;
1896 
1897       return false;
1898     }
1899 
1900   nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1901 
1902   /* If we can't throw, we obviously can't throw external.  */
1903   if (nothrow)
1904     return false;
1905 
1906   /* If we have an internal landing pad, then we're not external.  */
1907   if (lp != NULL)
1908     return false;
1909 
1910   /* If we're not within an EH region, then we are external.  */
1911   if (r == NULL)
1912     return true;
1913 
1914   /* The only thing that ought to be left is MUST_NOT_THROW regions,
1915      which don't always have landing pads.  */
1916   gcc_assert (r->type == ERT_MUST_NOT_THROW);
1917   return false;
1918 }
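
/* Summary of the classification above (illustrative): a nothrow insn is
   neither internal nor external; a REG_EH_REGION value > 0 (reachable
   landing pad) makes the insn throw internally; a value < 0
   (MUST_NOT_THROW) makes it neither; and a throwing insn with no
   containing region at all throws externally.  */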
1919 
1920 /* Return true if INSN cannot throw at all.  */
1921 
1922 bool
1923 insn_nothrow_p (const_rtx insn)
1924 {
1925   eh_landing_pad lp;
1926   eh_region r;
1927 
1928   if (! INSN_P (insn))
1929     return true;
1930 
1931   if (NONJUMP_INSN_P (insn)
1932       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1933     {
1934       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1935       int i, n = seq->len ();
1936 
1937       for (i = 0; i < n; i++)
1938 	if (!insn_nothrow_p (seq->element (i)))
1939 	  return false;
1940 
1941       return true;
1942     }
1943 
1944   return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1945 }
1946 
1947 /* Return true if INSN can perform a non-local goto.  */
1948 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */
1949 
1950 bool
1951 can_nonlocal_goto (const rtx_insn *insn)
1952 {
1953   if (nonlocal_goto_handler_labels && CALL_P (insn))
1954     {
1955       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1956       if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1957 	return true;
1958     }
1959   return false;
1960 }
1961 
1962 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */
1963 
1964 static unsigned int
1965 set_nothrow_function_flags (void)
1966 {
1967   rtx_insn *insn;
1968 
1969   crtl->nothrow = 1;
1970 
1971   /* Assume crtl->all_throwers_are_sibcalls until we encounter
1972      something that can throw an exception.  We specifically exempt
1973      CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1974      and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
1975      is optimistic.  */
1976 
1977   crtl->all_throwers_are_sibcalls = 1;
1978 
1979   /* If we don't know that this implementation of the function will
1980      actually be used, then we must not set TREE_NOTHROW, since
1981      callers must not assume that this function does not throw.  */
1982   if (TREE_NOTHROW (current_function_decl))
1983     return 0;
1984 
1985   if (! flag_exceptions)
1986     return 0;
1987 
1988   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1989     if (can_throw_external (insn))
1990       {
1991         crtl->nothrow = 0;
1992 
1993 	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1994 	  {
1995 	    crtl->all_throwers_are_sibcalls = 0;
1996 	    return 0;
1997 	  }
1998       }
1999 
2000   if (crtl->nothrow
2001       && (cgraph_node::get (current_function_decl)->get_availability ()
2002           >= AVAIL_AVAILABLE))
2003     {
2004       struct cgraph_node *node = cgraph_node::get (current_function_decl);
2005       struct cgraph_edge *e;
2006       for (e = node->callers; e; e = e->next_caller)
2007         e->can_throw_external = false;
2008       node->set_nothrow_flag (true);
2009 
2010       if (dump_file)
2011 	fprintf (dump_file, "Marking function nothrow: %s\n\n",
2012 		 current_function_name ());
2013     }
2014   return 0;
2015 }
2016 
2017 namespace {
2018 
2019 const pass_data pass_data_set_nothrow_function_flags =
2020 {
2021   RTL_PASS, /* type */
2022   "nothrow", /* name */
2023   OPTGROUP_NONE, /* optinfo_flags */
2024   TV_NONE, /* tv_id */
2025   0, /* properties_required */
2026   0, /* properties_provided */
2027   0, /* properties_destroyed */
2028   0, /* todo_flags_start */
2029   0, /* todo_flags_finish */
2030 };
2031 
2032 class pass_set_nothrow_function_flags : public rtl_opt_pass
2033 {
2034 public:
2035   pass_set_nothrow_function_flags (gcc::context *ctxt)
2036     : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2037   {}
2038 
2039   /* opt_pass methods: */
2040   virtual unsigned int execute (function *)
2041     {
2042       return set_nothrow_function_flags ();
2043     }
2044 
2045 }; // class pass_set_nothrow_function_flags
2046 
2047 } // anon namespace
2048 
2049 rtl_opt_pass *
2050 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2051 {
2052   return new pass_set_nothrow_function_flags (ctxt);
2053 }
2054 
2055 
2056 /* Various hooks for unwind library.  */
2057 
2058 /* Expand the EH support builtin functions:
2059    __builtin_eh_pointer and __builtin_eh_filter.  */
2060 
2061 static eh_region
2062 expand_builtin_eh_common (tree region_nr_t)
2063 {
2064   HOST_WIDE_INT region_nr;
2065   eh_region region;
2066 
2067   gcc_assert (tree_fits_shwi_p (region_nr_t));
2068   region_nr = tree_to_shwi (region_nr_t);
2069 
2070   region = (*cfun->eh->region_array)[region_nr];
2071 
2072   /* ??? We shouldn't have been able to delete an EH region without
2073      deleting all the code that depended on it.  */
2074   gcc_assert (region != NULL);
2075 
2076   return region;
2077 }
2078 
2079 /* Expand to the exc_ptr value from the given eh region.  */
2080 
2081 rtx
2082 expand_builtin_eh_pointer (tree exp)
2083 {
2084   eh_region region
2085     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2086   if (region->exc_ptr_reg == NULL)
2087     region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2088   return region->exc_ptr_reg;
2089 }
2090 
2091 /* Expand to the filter value from the given eh region.  */
2092 
2093 rtx
2094 expand_builtin_eh_filter (tree exp)
2095 {
2096   eh_region region
2097     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2098   if (region->filter_reg == NULL)
2099     region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2100   return region->filter_reg;
2101 }
2102 
2103 /* Copy the exc_ptr and filter values from one landing pad's registers
2104    to another.  This is used to inline the resx statement.  */
2105 
2106 rtx
2107 expand_builtin_eh_copy_values (tree exp)
2108 {
2109   eh_region dst
2110     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2111   eh_region src
2112     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2113   scalar_int_mode fmode = targetm.eh_return_filter_mode ();
2114 
2115   if (dst->exc_ptr_reg == NULL)
2116     dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2117   if (src->exc_ptr_reg == NULL)
2118     src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2119 
2120   if (dst->filter_reg == NULL)
2121     dst->filter_reg = gen_reg_rtx (fmode);
2122   if (src->filter_reg == NULL)
2123     src->filter_reg = gen_reg_rtx (fmode);
2124 
2125   emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2126   emit_move_insn (dst->filter_reg, src->filter_reg);
2127 
2128   return const0_rtx;
2129 }
2130 
2131 /* Do any necessary initialization to access arbitrary stack frames.
2132    On the SPARC, this means flushing the register windows.  */
2133 
2134 void
2135 expand_builtin_unwind_init (void)
2136 {
2137   /* Set this so all the registers get saved in our frame; we need to be
2138      able to copy the saved values for any registers from frames we unwind.  */
2139   crtl->saves_all_registers = 1;
2140 
2141   SETUP_FRAME_ADDRESSES ();
2142 }
2143 
2144 /* Map a non-negative number to an eh return data register number; expands
2145    to -1 if no return data register is associated with the input number.
2146    At least the inputs 0 and 1 must be mapped; the target may provide more.  */
2147 
2148 rtx
2149 expand_builtin_eh_return_data_regno (tree exp)
2150 {
2151   tree which = CALL_EXPR_ARG (exp, 0);
2152   unsigned HOST_WIDE_INT iwhich;
2153 
2154   if (TREE_CODE (which) != INTEGER_CST)
2155     {
2156       error ("argument of %<__builtin_eh_return_regno%> must be constant");
2157       return constm1_rtx;
2158     }
2159 
2160   iwhich = tree_to_uhwi (which);
2161   iwhich = EH_RETURN_DATA_REGNO (iwhich);
2162   if (iwhich == INVALID_REGNUM)
2163     return constm1_rtx;
2164 
2165 #ifdef DWARF_FRAME_REGNUM
2166   iwhich = DWARF_FRAME_REGNUM (iwhich);
2167 #else
2168   iwhich = DBX_REGISTER_NUMBER (iwhich);
2169 #endif
2170 
2171   return GEN_INT (iwhich);
2172 }
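
/* Typical consumer, shown for illustration only: a language personality
   routine uses these register numbers to hand the exception object and
   the selected filter back to the landing pad, schematically (after the
   pattern in libstdc++'s __gxx_personality_v0; exception_object and
   handler_switch_value are placeholder names):

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		    (_Unwind_Ptr) exception_object);
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
		    handler_switch_value);
*/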
2173 
2174 /* Given a value extracted from the return address register or stack slot,
2175    return the actual address encoded in that value.  */
2176 
2177 rtx
2178 expand_builtin_extract_return_addr (tree addr_tree)
2179 {
2180   rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2181 
2182   if (GET_MODE (addr) != Pmode
2183       && GET_MODE (addr) != VOIDmode)
2184     {
2185 #ifdef POINTERS_EXTEND_UNSIGNED
2186       addr = convert_memory_address (Pmode, addr);
2187 #else
2188       addr = convert_to_mode (Pmode, addr, 0);
2189 #endif
2190     }
2191 
2192   /* First mask out any unwanted bits.  */
2193   rtx mask = MASK_RETURN_ADDR;
2194   if (mask)
2195     expand_and (Pmode, addr, mask, addr);
2196 
2197   /* Then adjust to find the real return address.  */
2198   if (RETURN_ADDR_OFFSET)
2199     addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2200 
2201   return addr;
2202 }
2203 
2204 /* Given an actual address in addr_tree, do any necessary encoding
2205    and return the value to be stored in the return address register or
2206    stack slot so the epilogue will return to that address.  */
2207 
2208 rtx
2209 expand_builtin_frob_return_addr (tree addr_tree)
2210 {
2211   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2212 
2213   addr = convert_memory_address (Pmode, addr);
2214 
2215   if (RETURN_ADDR_OFFSET)
2216     {
2217       addr = force_reg (Pmode, addr);
2218       addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2219     }
2220 
2221   return addr;
2222 }
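
/* Illustrative note: this is intended as the inverse of
   __builtin_extract_return_addr above -- frob subtracts
   RETURN_ADDR_OFFSET while extract adds it back (after masking with
   MASK_RETURN_ADDR), so on a conforming target
   extract (frob (addr)) == addr.  */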
2223 
2224 /* Set up the epilogue with the magic bits we'll need to return to the
2225    exception handler.  */
2226 
2227 void
2228 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2229 			  tree handler_tree)
2230 {
2231   rtx tmp;
2232 
2233 #ifdef EH_RETURN_STACKADJ_RTX
2234   tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2235 		     VOIDmode, EXPAND_NORMAL);
2236   tmp = convert_memory_address (Pmode, tmp);
2237   if (!crtl->eh.ehr_stackadj)
2238     crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
2239   else if (tmp != crtl->eh.ehr_stackadj)
2240     emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2241 #endif
2242 
2243   tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2244 		     VOIDmode, EXPAND_NORMAL);
2245   tmp = convert_memory_address (Pmode, tmp);
2246   if (!crtl->eh.ehr_handler)
2247     crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
2248   else if (tmp != crtl->eh.ehr_handler)
2249     emit_move_insn (crtl->eh.ehr_handler, tmp);
2250 
2251   if (!crtl->eh.ehr_label)
2252     crtl->eh.ehr_label = gen_label_rtx ();
2253   emit_jump (crtl->eh.ehr_label);
2254 }
2255 
2256 /* Expand __builtin_eh_return.  This exit path from the function loads up
2257    the eh return data registers, adjusts the stack, and branches to a
2258    given PC other than the normal return address.  */
2259 
2260 void
2261 expand_eh_return (void)
2262 {
2263   rtx_code_label *around_label;
2264 
2265   if (! crtl->eh.ehr_label)
2266     return;
2267 
2268   crtl->calls_eh_return = 1;
2269 
2270 #ifdef EH_RETURN_STACKADJ_RTX
2271   emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2272 #endif
2273 
2274   around_label = gen_label_rtx ();
2275   emit_jump (around_label);
2276 
2277   emit_label (crtl->eh.ehr_label);
2278   clobber_return_register ();
2279 
2280 #ifdef EH_RETURN_STACKADJ_RTX
2281   emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2282 #endif
2283 
2284   if (targetm.have_eh_return ())
2285     emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
2286   else
2287     {
2288       if (rtx handler = EH_RETURN_HANDLER_RTX)
2289 	emit_move_insn (handler, crtl->eh.ehr_handler);
2290       else
2291 	error ("%<__builtin_eh_return%> not supported on this target");
2292     }
2293 
2294   emit_label (around_label);
2295 }
2296 
2297 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2298    POINTERS_EXTEND_UNSIGNED and return it.  */
2299 
2300 rtx
2301 expand_builtin_extend_pointer (tree addr_tree)
2302 {
2303   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2304   int extend;
2305 
2306 #ifdef POINTERS_EXTEND_UNSIGNED
2307   extend = POINTERS_EXTEND_UNSIGNED;
2308 #else
2309   /* The previous EH code did an unsigned extend by default, so we do this also
2310      for consistency.  */
2311   extend = 1;
2312 #endif
2313 
2314   return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2315 }
2316 
2317 static int
2318 add_action_record (action_hash_type *ar_hash, int filter, int next)
2319 {
2320   struct action_record **slot, *new_ar, tmp;
2321 
2322   tmp.filter = filter;
2323   tmp.next = next;
2324   slot = ar_hash->find_slot (&tmp, INSERT);
2325 
2326   if ((new_ar = *slot) == NULL)
2327     {
2328       new_ar = XNEW (struct action_record);
2329       new_ar->offset = crtl->eh.action_record_data->length () + 1;
2330       new_ar->filter = filter;
2331       new_ar->next = next;
2332       *slot = new_ar;
2333 
2334       /* The filter value goes in untouched.  The link to the next
2335 	 record is a "self-relative" byte offset, or zero to indicate
2336 	 that there is no next record.  So convert the absolute 1-based
2337 	 indices we've been carrying around into a displacement.  */
2338 
2339       push_sleb128 (&crtl->eh.action_record_data, filter);
2340       if (next)
2341 	next -= crtl->eh.action_record_data->length () + 1;
2342       push_sleb128 (&crtl->eh.action_record_data, next);
2343     }
2344 
2345   return new_ar->offset;
2346 }
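
/* Worked example (illustrative): starting from an empty
   action_record_data vector, add_action_record (h, 5, 0) pushes the
   sleb128 bytes for filter 5 and next 0 and returns 1-based offset 1.
   A following add_action_record (h, 7, 1) pushes filter 7, rewrites the
   absolute index 1 as the self-relative displacement -3 (1 minus the
   new length-plus-one, 4), pushes that, and returns offset 3.  */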
2347 
2348 static int
2349 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2350 {
2351   int next;
2352 
2353   /* If we've reached the top of the region chain, then we have
2354      no actions, and require no landing pad.  */
2355   if (region == NULL)
2356     return -1;
2357 
2358   switch (region->type)
2359     {
2360     case ERT_CLEANUP:
2361       {
2362 	eh_region r;
2363 	/* A cleanup adds a zero filter to the beginning of the chain, but
2364 	   there are special cases to look out for.  If there are *only*
2365 	   cleanups along a path, then it compresses to a zero action.
2366 	   Further, if there are multiple cleanups along a path, we only
2367 	   need to represent one of them, as that is enough to trigger
2368 	   entry to the landing pad at runtime.  */
2369 	next = collect_one_action_chain (ar_hash, region->outer);
2370 	if (next <= 0)
2371 	  return 0;
2372 	for (r = region->outer; r ; r = r->outer)
2373 	  if (r->type == ERT_CLEANUP)
2374 	    return next;
2375 	return add_action_record (ar_hash, 0, next);
2376       }
2377 
2378     case ERT_TRY:
2379       {
2380 	eh_catch c;
2381 
2382 	/* Process the associated catch regions in reverse order.
2383 	   If there's a catch-all handler, then we don't need to
2384 	   search outer regions.  Use a magic -3 value to record
2385 	   that we haven't done the outer search.  */
2386 	next = -3;
2387 	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2388 	  {
2389 	    if (c->type_list == NULL)
2390 	      {
2391 		/* Retrieve the filter from the head of the filter list
2392 		   where we have stored it (see assign_filter_values).  */
2393 		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2394 		next = add_action_record (ar_hash, filter, 0);
2395 	      }
2396 	    else
2397 	      {
2398 		/* Once the outer search is done, trigger an action record for
2399 		   each filter we have.  */
2400 		tree flt_node;
2401 
2402 		if (next == -3)
2403 		  {
2404 		    next = collect_one_action_chain (ar_hash, region->outer);
2405 
2406 		    /* If there is no next action, terminate the chain.  */
2407 		    if (next == -1)
2408 		      next = 0;
2409 		    /* If all outer actions are cleanups or must_not_throw,
2410 		       we'll have no action record for it, since we had wanted
2411 		       to encode these states in the call-site record directly.
2412 		       Add a cleanup action to the chain to catch these.  */
2413 		    else if (next <= 0)
2414 		      next = add_action_record (ar_hash, 0, 0);
2415 		  }
2416 
2417 		flt_node = c->filter_list;
2418 		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2419 		  {
2420 		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2421 		    next = add_action_record (ar_hash, filter, next);
2422 		  }
2423 	      }
2424 	  }
2425 	return next;
2426       }
2427 
2428     case ERT_ALLOWED_EXCEPTIONS:
2429       /* An exception specification adds its filter to the
2430 	 beginning of the chain.  */
2431       next = collect_one_action_chain (ar_hash, region->outer);
2432 
2433       /* If there is no next action, terminate the chain.  */
2434       if (next == -1)
2435 	next = 0;
2436       /* If all outer actions are cleanups or must_not_throw,
2437 	 we'll have no action record for it, since we had wanted
2438 	 to encode these states in the call-site record directly.
2439 	 Add a cleanup action to the chain to catch these.  */
2440       else if (next <= 0)
2441 	next = add_action_record (ar_hash, 0, 0);
2442 
2443       return add_action_record (ar_hash, region->u.allowed.filter, next);
2444 
2445     case ERT_MUST_NOT_THROW:
2446       /* A must-not-throw region with no inner handlers or cleanups
2447 	 requires no call-site entry.  Note that this differs from
2448 	 the no handler or cleanup case in that we do require an lsda
2449 	 to be generated.  Return a magic -2 value to record this.  */
2450       return -2;
2451     }
2452 
2453   gcc_unreachable ();
2454 }
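
/* Illustrative walk-through: for an insn inside a CLEANUP region whose
   outer region is a TRY catching a single type with filter 1 (and
   nothing further out), the ERT_TRY case yields an action record
   "filter 1, next 0", and the ERT_CLEANUP case then prepends a zero
   filter, giving the chain cleanup(0) -> catch(1).  Had the path
   contained only cleanups, it would have compressed to the no-action
   value 0 instead.  */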
2455 
2456 static int
2457 add_call_site (rtx landing_pad, int action, int section)
2458 {
2459   call_site_record record;
2460 
2461   record = ggc_alloc<call_site_record_d> ();
2462   record->landing_pad = landing_pad;
2463   record->action = action;
2464 
2465   vec_safe_push (crtl->eh.call_site_record_v[section], record);
2466 
2467   return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2468 }
2469 
2470 static rtx_note *
2471 emit_note_eh_region_end (rtx_insn *insn)
2472 {
2473   return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2474 }
2475 
2476 /* Add a NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section
2477    starts with a landing pad.
2478    With the landing pad at offset 0 from the start label of the section,
2479    we would miss EH delivery, because 0 is special and means no landing pad.  */
2480 
2481 static bool
2482 maybe_add_nop_after_section_switch (void)
2483 {
2484   if (!crtl->uses_eh_lsda
2485       || !crtl->eh.call_site_record_v[1])
2486     return false;
2487   int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
2488   hash_set<rtx_insn *> visited;
2489 
2490   for (int i = 0; i < n; ++i)
2491     {
2492       struct call_site_record_d *cs
2493 	 = (*crtl->eh.call_site_record_v[1])[i];
2494       if (cs->landing_pad)
2495 	{
2496 	  rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
2497 	  while (true)
2498 	    {
2499 	      /* Landing pads have the LABEL_PRESERVE_P flag set.  This check
2500 		 makes sure that we do not walk past a landing pad visited
2501 		 earlier, which would result in quadratic behaviour.  */
2502 	      if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
2503 		  && visited.add (insn))
2504 		break;
2505 
2506 	      /* Conservatively assume that an ASM insn may be empty.  We
2507 		 have no way to tell what it contains.  */
2508 	      if (active_insn_p (insn)
2509 		  && GET_CODE (PATTERN (insn)) != ASM_INPUT
2510 		  && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
2511 		break;
2512 
2513 	      /* If we reached the section switch note, the landing pad is
2514 		 at the very start of the cold section and a NOP is needed.  */
2515 	      if (GET_CODE (insn) == NOTE
2516 		  && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2517 		{
2518 		  emit_insn_after (gen_nop (), insn);
2519 		  break;
2520 		}
2521 
2522 	      /* We visit only labels from the cold section.  We should never
2523 		 hit the beginning of the insn stream here.  */
2524 	      insn = PREV_INSN (insn);
2525 	    }
2526 	}
2527     }
2528   return false;
2529 }
2530 
2531 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2532    The new note numbers will not refer to region numbers, but
2533    instead to call site entries.  */
2534 
2535 static unsigned int
2536 convert_to_eh_region_ranges (void)
2537 {
2538   rtx insn;
2539   rtx_insn *iter;
2540   rtx_note *note;
2541   action_hash_type ar_hash (31);
2542   int last_action = -3;
2543   rtx_insn *last_action_insn = NULL;
2544   rtx last_landing_pad = NULL_RTX;
2545   rtx_insn *first_no_action_insn = NULL;
2546   int call_site = 0;
2547   int cur_sec = 0;
2548   rtx_insn *section_switch_note = NULL;
2549   rtx_insn *first_no_action_insn_before_switch = NULL;
2550   rtx_insn *last_no_action_insn_before_switch = NULL;
2551   int saved_call_site_base = call_site_base;
2552 
2553   vec_alloc (crtl->eh.action_record_data, 64);
2554 
2555   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2556     if (INSN_P (iter))
2557       {
2558 	eh_landing_pad lp;
2559 	eh_region region;
2560 	bool nothrow;
2561 	int this_action;
2562 	rtx_code_label *this_landing_pad;
2563 
2564 	insn = iter;
2565 	if (NONJUMP_INSN_P (insn)
2566 	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
2567 	  insn = XVECEXP (PATTERN (insn), 0, 0);
2568 
2569 	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2570 	if (nothrow)
2571 	  continue;
2572 	if (region)
2573 	  this_action = collect_one_action_chain (&ar_hash, region);
2574 	else
2575 	  this_action = -1;
2576 
2577 	/* The existence of catch handlers or must-not-throw regions
2578 	   implies that an lsda is needed (even if empty).  */
2579 	if (this_action != -1)
2580 	  crtl->uses_eh_lsda = 1;
2581 
2582 	/* Delay creation of region notes for no-action regions
2583 	   until we're sure that an lsda will be required.  */
2584 	else if (last_action == -3)
2585 	  {
2586 	    first_no_action_insn = iter;
2587 	    last_action = -1;
2588 	  }
2589 
2590 	if (this_action >= 0)
2591 	  this_landing_pad = lp->landing_pad;
2592 	else
2593 	  this_landing_pad = NULL;
2594 
2595 	/* Differing actions or landing pads implies a change in call-site
2596 	   info, which implies some EH_REGION note should be emitted.  */
2597 	if (last_action != this_action
2598 	    || last_landing_pad != this_landing_pad)
2599 	  {
2600 	    /* If there is a queued no-action region in the other section
2601 	       with hot/cold partitioning, emit it now.  */
2602 	    if (first_no_action_insn_before_switch)
2603 	      {
2604 		gcc_assert (this_action != -1
2605 			    && last_action == (first_no_action_insn
2606 					       ? -1 : -3));
2607 		call_site = add_call_site (NULL_RTX, 0, 0);
2608 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2609 					 first_no_action_insn_before_switch);
2610 		NOTE_EH_HANDLER (note) = call_site;
2611 		note
2612 		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
2613 		NOTE_EH_HANDLER (note) = call_site;
2614 		gcc_assert (last_action != -3
2615 			    || (last_action_insn
2616 				== last_no_action_insn_before_switch));
2617 		first_no_action_insn_before_switch = NULL;
2618 		last_no_action_insn_before_switch = NULL;
2619 		call_site_base++;
2620 	      }
2621 	    /* If we'd not seen a previous action (-3) or the previous
2622 	       action was must-not-throw (-2), then we do not need an
2623 	       end note.  */
2624 	    if (last_action >= -1)
2625 	      {
2626 		/* If we delayed the creation of the begin, do it now.  */
2627 		if (first_no_action_insn)
2628 		  {
2629 		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
2630 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2631 					     first_no_action_insn);
2632 		    NOTE_EH_HANDLER (note) = call_site;
2633 		    first_no_action_insn = NULL;
2634 		  }
2635 
2636 		note = emit_note_eh_region_end (last_action_insn);
2637 		NOTE_EH_HANDLER (note) = call_site;
2638 	      }
2639 
2640 	    /* If the new action is must-not-throw, then no region notes
2641 	       are created.  */
2642 	    if (this_action >= -1)
2643 	      {
2644 		call_site = add_call_site (this_landing_pad,
2645 					   this_action < 0 ? 0 : this_action,
2646 					   cur_sec);
2647 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2648 		NOTE_EH_HANDLER (note) = call_site;
2649 	      }
2650 
2651 	    last_action = this_action;
2652 	    last_landing_pad = this_landing_pad;
2653 	  }
2654 	last_action_insn = iter;
2655       }
2656     else if (NOTE_P (iter)
2657 	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2658       {
2659 	gcc_assert (section_switch_note == NULL_RTX);
2660 	gcc_assert (flag_reorder_blocks_and_partition);
2661 	section_switch_note = iter;
2662 	if (first_no_action_insn)
2663 	  {
2664 	    first_no_action_insn_before_switch = first_no_action_insn;
2665 	    last_no_action_insn_before_switch = last_action_insn;
2666 	    first_no_action_insn = NULL;
2667 	    gcc_assert (last_action == -1);
2668 	    last_action = -3;
2669 	  }
2670 	/* Force closing of current EH region before section switch and
2671 	   opening a new one afterwards.  */
2672 	else if (last_action != -3)
2673 	  last_landing_pad = pc_rtx;
2674 	if (crtl->eh.call_site_record_v[cur_sec])
2675 	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2676 	cur_sec++;
2677 	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2678 	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2679       }
2680 
2681   if (last_action >= -1 && ! first_no_action_insn)
2682     {
2683       note = emit_note_eh_region_end (last_action_insn);
2684       NOTE_EH_HANDLER (note) = call_site;
2685     }
2686 
2687   call_site_base = saved_call_site_base;
2688 
2689   return 0;
2690 }
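
/* Sketch of the result (illustrative): after this pass a throwing call
   reaching landing pad LP is bracketed as

     NOTE_INSN_EH_REGION_BEG   (NOTE_EH_HANDLER = call-site index)
     (call_insn ...)
     NOTE_INSN_EH_REGION_END   (NOTE_EH_HANDLER = same index)

   where the call-site record added by add_call_site carries LP and the
   action chain computed by collect_one_action_chain.  */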
2691 
2692 namespace {
2693 
2694 const pass_data pass_data_convert_to_eh_region_ranges =
2695 {
2696   RTL_PASS, /* type */
2697   "eh_ranges", /* name */
2698   OPTGROUP_NONE, /* optinfo_flags */
2699   TV_NONE, /* tv_id */
2700   0, /* properties_required */
2701   0, /* properties_provided */
2702   0, /* properties_destroyed */
2703   0, /* todo_flags_start */
2704   0, /* todo_flags_finish */
2705 };
2706 
2707 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2708 {
2709 public:
2710   pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2711     : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2712   {}
2713 
2714   /* opt_pass methods: */
2715   virtual bool gate (function *);
2716   virtual unsigned int execute (function *)
2717     {
2718       int ret = convert_to_eh_region_ranges ();
2719       maybe_add_nop_after_section_switch ();
2720       return ret;
2721     }
2722 
2723 }; // class pass_convert_to_eh_region_ranges
2724 
2725 bool
2726 pass_convert_to_eh_region_ranges::gate (function *)
2727 {
2728   /* Nothing to do for SJLJ exceptions or if no regions created.  */
2729   if (cfun->eh->region_tree == NULL)
2730     return false;
2731   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2732     return false;
2733   return true;
2734 }
2735 
2736 } // anon namespace
2737 
2738 rtl_opt_pass *
2739 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2740 {
2741   return new pass_convert_to_eh_region_ranges (ctxt);
2742 }
2743 
2744 static void
2745 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2746 {
2747   do
2748     {
2749       unsigned char byte = value & 0x7f;
2750       value >>= 7;
2751       if (value)
2752 	byte |= 0x80;
2753       vec_safe_push (*data_area, byte);
2754     }
2755   while (value);
2756 }
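
/* Worked example (illustrative): push_uleb128 with value 624485
   (0x98765) emits the bytes 0xE5 0x8E 0x26 -- low-order seven bits
   first, with the 0x80 continuation bit set on all but the last
   byte.  */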
2757 
2758 static void
2759 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2760 {
2761   unsigned char byte;
2762   int more;
2763 
2764   do
2765     {
2766       byte = value & 0x7f;
2767       value >>= 7;
2768       more = ! ((value == 0 && (byte & 0x40) == 0)
2769 		|| (value == -1 && (byte & 0x40) != 0));
2770       if (more)
2771 	byte |= 0x80;
2772       vec_safe_push (*data_area, byte);
2773     }
2774   while (more);
2775 }
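
/* Worked example (illustrative): push_sleb128 with value -2 emits the
   single byte 0x7E (the 0x40 sign bit is set and the value is fully
   consumed, so no continuation is needed), while -129 emits
   0xFF 0x7E.  */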
2776 
2777 
2778 static int
2779 dw2_size_of_call_site_table (int section)
2780 {
2781   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2782   int size = n * (4 + 4 + 4);
2783   int i;
2784 
2785   for (i = 0; i < n; ++i)
2786     {
2787       struct call_site_record_d *cs =
2788 	(*crtl->eh.call_site_record_v[section])[i];
2789       size += size_of_uleb128 (cs->action);
2790     }
2791 
2792   return size;
2793 }
2794 
2795 static int
2796 sjlj_size_of_call_site_table (void)
2797 {
2798   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2799   int size = 0;
2800   int i;
2801 
2802   for (i = 0; i < n; ++i)
2803     {
2804       struct call_site_record_d *cs =
2805 	(*crtl->eh.call_site_record_v[0])[i];
2806       size += size_of_uleb128 (INTVAL (cs->landing_pad));
2807       size += size_of_uleb128 (cs->action);
2808     }
2809 
2810   return size;
2811 }
2812 
2813 static void
2814 dw2_output_call_site_table (int cs_format, int section)
2815 {
2816   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2817   int i;
2818   const char *begin;
2819 
2820   if (section == 0)
2821     begin = current_function_func_begin_label;
2822   else if (first_function_block_is_cold)
2823     begin = crtl->subsections.hot_section_label;
2824   else
2825     begin = crtl->subsections.cold_section_label;
2826 
2827   for (i = 0; i < n; ++i)
2828     {
2829       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2830       char reg_start_lab[32];
2831       char reg_end_lab[32];
2832       char landing_pad_lab[32];
2833 
2834       ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2835       ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2836 
2837       if (cs->landing_pad)
2838 	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2839 				     CODE_LABEL_NUMBER (cs->landing_pad));
2840 
2841       /* ??? Perhaps use insn length scaling if the assembler supports
2842 	 generic arithmetic.  */
2843       /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2844 	 data4 if the function is small enough.  */
2845       if (cs_format == DW_EH_PE_uleb128)
2846 	{
2847 	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2848 					"region %d start", i);
2849 	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2850 					"length");
2851 	  if (cs->landing_pad)
2852 	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2853 					  "landing pad");
2854 	  else
2855 	    dw2_asm_output_data_uleb128 (0, "landing pad");
2856 	}
2857       else
2858 	{
2859 	  dw2_asm_output_delta (4, reg_start_lab, begin,
2860 				"region %d start", i);
2861 	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2862 	  if (cs->landing_pad)
2863 	    dw2_asm_output_delta (4, landing_pad_lab, begin,
2864 				  "landing pad");
2865 	  else
2866 	    dw2_asm_output_data (4, 0, "landing pad");
2867 	}
2868       dw2_asm_output_data_uleb128 (cs->action, "action");
2869     }
2870 
2871   call_site_base += n;
2872 }
2873 
2874 static void
2875 sjlj_output_call_site_table (void)
2876 {
2877   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2878   int i;
2879 
2880   for (i = 0; i < n; ++i)
2881     {
2882       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2883 
2884       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2885 				   "region %d landing pad", i);
2886       dw2_asm_output_data_uleb128 (cs->action, "action");
2887     }
2888 
2889   call_site_base += n;
2890 }
2891 
2892 /* Switch to the section that should be used for exception tables.  */
2893 
2894 static void
2895 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2896 {
2897   section *s;
2898 
2899   if (exception_section)
2900     s = exception_section;
2901   else
2902     {
2903       int flags;
2904 
2905       if (EH_TABLES_CAN_BE_READ_ONLY)
2906 	{
2907 	  int tt_format =
2908 	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2909 	  flags = ((! flag_pic
2910 		    || ((tt_format & 0x70) != DW_EH_PE_absptr
2911 			&& (tt_format & 0x70) != DW_EH_PE_aligned))
2912 		   ? 0 : SECTION_WRITE);
2913 	}
2914       else
2915 	flags = SECTION_WRITE;
2916 
2917       /* Compute the section and cache it into exception_section,
2918 	 unless it depends on the function name.  */
2919       if (targetm_common.have_named_sections)
2920 	{
2921 #ifdef HAVE_LD_EH_GC_SECTIONS
2922 	  if (flag_function_sections
2923 	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2924 	    {
2925 	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2926 	      /* The EH table must match the code section, so only mark
2927 		 it linkonce if we have COMDAT groups to tie them together.  */
2928 	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2929 		flags |= SECTION_LINKONCE;
2930 	      sprintf (section_name, ".gcc_except_table.%s", fnname);
2931 	      s = get_section (section_name, flags, current_function_decl);
2932 	      free (section_name);
2933 	    }
2934 	  else
2935 #endif
2936 	    exception_section
2937 	      = s = get_section (".gcc_except_table", flags, NULL);
2938 	}
2939       else
2940 	exception_section
2941 	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
2942     }
2943 
2944   switch_to_section (s);
2945 }
2946 
2947 /* Output a reference from an exception table to the type_info object TYPE.
2948    TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2949    the value.  */
2950 
2951 static void
2952 output_ttype (tree type, int tt_format, int tt_format_size)
2953 {
2954   rtx value;
2955   bool is_public = true;
2956 
2957   if (type == NULL_TREE)
2958     value = const0_rtx;
2959   else
2960     {
2961       /* FIXME lto.  pass_ipa_free_lang_data changes all types to
2962 	 runtime types so TYPE should already be a runtime type
2963 	 reference.  When pass_ipa_free_lang_data is made a default
2964 	 pass, we can then remove the call to lookup_type_for_runtime
2965 	 below.  */
2966       if (TYPE_P (type))
2967 	type = lookup_type_for_runtime (type);
2968 
2969       value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2970 
2971       /* Let cgraph know that the rtti decl is used.  Not all of the
2972 	 paths below go through assemble_integer, which would take
2973 	 care of this for us.  */
2974       STRIP_NOPS (type);
2975       if (TREE_CODE (type) == ADDR_EXPR)
2976 	{
2977 	  type = TREE_OPERAND (type, 0);
2978 	  if (VAR_P (type))
2979 	    is_public = TREE_PUBLIC (type);
2980 	}
2981       else
2982 	gcc_assert (TREE_CODE (type) == INTEGER_CST);
2983     }
2984 
2985   /* Allow the target to override the type table entry format.  */
2986   if (targetm.asm_out.ttype (value))
2987     return;
2988 
2989   if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2990     assemble_integer (value, tt_format_size,
2991 		      tt_format_size * BITS_PER_UNIT, 1);
2992   else
2993     dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2994 }
2995 
2996 /* Output an exception table for the current function according to SECTION.
2997 
2998    If the function has been partitioned into hot and cold parts, value 0 for
2999    SECTION refers to the table associated with the hot part while value 1
3000    refers to the table associated with the cold part.  If the function has
3001    not been partitioned, value 0 refers to the single exception table.  */
3002 
3003 static void
3004 output_one_function_exception_table (int section)
3005 {
3006   int tt_format, cs_format, lp_format, i;
3007   char ttype_label[32];
3008   char cs_after_size_label[32];
3009   char cs_end_label[32];
3010   int call_site_len;
3011   int have_tt_data;
3012   int tt_format_size = 0;
3013 
3014   have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
3015 		  || (targetm.arm_eabi_unwinder
3016 		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
3017 		      : vec_safe_length (cfun->eh->ehspec_data.other)));
3018 
3019   /* Indicate the format of the @TType entries.  */
3020   if (! have_tt_data)
3021     tt_format = DW_EH_PE_omit;
3022   else
3023     {
3024       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3025       if (HAVE_AS_LEB128)
3026 	ASM_GENERATE_INTERNAL_LABEL (ttype_label,
3027 				     section ? "LLSDATTC" : "LLSDATT",
3028 				     current_function_funcdef_no);
3029 
3030       tt_format_size = size_of_encoded_value (tt_format);
3031 
3032       assemble_align (tt_format_size * BITS_PER_UNIT);
3033     }
3034 
3035   targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
3036 				  current_function_funcdef_no);
3037 
3038   /* The LSDA header.  */
3039 
3040   /* Indicate the format of the landing pad start pointer.  An omitted
3041      field implies @LPStart == @Start.  */
3042   /* Currently we always put @LPStart == @Start.  This field would
3043      be most useful in moving the landing pads completely out of
3044      line to another section, but it could also be used to minimize
3045      the size of uleb128 landing pad offsets.  */
3046   lp_format = DW_EH_PE_omit;
3047   dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3048 		       eh_data_format_name (lp_format));
3049 
3050   /* @LPStart pointer would go here.  */
3051 
3052   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3053 		       eh_data_format_name (tt_format));
3054 
3055   if (!HAVE_AS_LEB128)
3056     {
3057       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3058 	call_site_len = sjlj_size_of_call_site_table ();
3059       else
3060 	call_site_len = dw2_size_of_call_site_table (section);
3061     }
3062 
3063   /* A pc-relative 4-byte displacement to the @TType data.  */
3064   if (have_tt_data)
3065     {
3066       if (HAVE_AS_LEB128)
3067 	{
3068 	  char ttype_after_disp_label[32];
3069 	  ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3070 				       section ? "LLSDATTDC" : "LLSDATTD",
3071 				       current_function_funcdef_no);
3072 	  dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3073 					"@TType base offset");
3074 	  ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3075 	}
3076       else
3077 	{
3078 	  /* Ug.  Alignment queers things.  */
3079 	  unsigned int before_disp, after_disp, last_disp, disp;
3080 
3081 	  before_disp = 1 + 1;
3082 	  after_disp = (1 + size_of_uleb128 (call_site_len)
3083 			+ call_site_len
3084 			+ vec_safe_length (crtl->eh.action_record_data)
3085 			+ (vec_safe_length (cfun->eh->ttype_data)
3086 			   * tt_format_size));
3087 
3088 	  disp = after_disp;
3089 	  do
3090 	    {
3091 	      unsigned int disp_size, pad;
3092 
3093 	      last_disp = disp;
3094 	      disp_size = size_of_uleb128 (disp);
3095 	      pad = before_disp + disp_size + after_disp;
3096 	      if (pad % tt_format_size)
3097 		pad = tt_format_size - (pad % tt_format_size);
3098 	      else
3099 		pad = 0;
3100 	      disp = after_disp + pad;
3101 	    }
3102 	  while (disp != last_disp);
3103 
3104 	  dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3105 	}
3106     }
3107 
3108   /* Indicate the format of the call-site offsets.  */
3109   if (HAVE_AS_LEB128)
3110     cs_format = DW_EH_PE_uleb128;
3111   else
3112     cs_format = DW_EH_PE_udata4;
3113 
3114   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3115 		       eh_data_format_name (cs_format));
3116 
3117   if (HAVE_AS_LEB128)
3118     {
3119       ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3120 				   section ? "LLSDACSBC" : "LLSDACSB",
3121 				   current_function_funcdef_no);
3122       ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3123 				   section ? "LLSDACSEC" : "LLSDACSE",
3124 				   current_function_funcdef_no);
3125       dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3126 				    "Call-site table length");
3127       ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3128       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3129 	sjlj_output_call_site_table ();
3130       else
3131 	dw2_output_call_site_table (cs_format, section);
3132       ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3133     }
3134   else
3135     {
3136       dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3137       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3138 	sjlj_output_call_site_table ();
3139       else
3140 	dw2_output_call_site_table (cs_format, section);
3141     }
3142 
3143   /* ??? Decode and interpret the data for flag_debug_asm.  */
3144   {
3145     uchar uc;
3146     FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3147       dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3148   }
3149 
3150   if (have_tt_data)
3151     assemble_align (tt_format_size * BITS_PER_UNIT);
3152 
3153   i = vec_safe_length (cfun->eh->ttype_data);
3154   while (i-- > 0)
3155     {
3156       tree type = (*cfun->eh->ttype_data)[i];
3157       output_ttype (type, tt_format, tt_format_size);
3158     }
3159 
3160   if (HAVE_AS_LEB128 && have_tt_data)
3161     ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3162 
3163   /* ??? Decode and interpret the data for flag_debug_asm.  */
3164   if (targetm.arm_eabi_unwinder)
3165     {
3166       tree type;
3167       for (i = 0;
3168 	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3169 	output_ttype (type, tt_format, tt_format_size);
3170     }
3171   else
3172     {
3173       uchar uc;
3174       for (i = 0;
3175 	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3176 	dw2_asm_output_data (1, uc,
3177 			     i ? NULL : "Exception specification table");
3178     }
3179 }
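
/* Illustrative shape of the LSDA emitted above, assuming @TType data is
   present and a leb128-capable assembler (the cold-section table uses
   the C-suffixed label variants):

     .byte   0xff                ; @LPStart format: DW_EH_PE_omit
     .byte   tt_format           ; @TType format
     .uleb128 LLSDATT-LLSDATTD   ; @TType base offset
     .byte   0x01                ; call-site format: DW_EH_PE_uleb128
     .uleb128 LLSDACSE-LLSDACSB  ; call-site table length
     ... call-site records ...
     ... action record table ...
     ... type table, ending at LLSDATT ...
     ... exception specification table ...
*/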
3180 
3181 /* Output an exception table for the current function according to SECTION,
3182    switching back and forth from the function section appropriately.
3183 
3184    If the function has been partitioned into hot and cold parts, value 0 for
3185    SECTION refers to the table associated with the hot part while value 1
3186    refers to the table associated with the cold part.  If the function has
3187    not been partitioned, value 0 refers to the single exception table.  */
3188 
3189 void
3190 output_function_exception_table (int section)
3191 {
3192   const char *fnname = get_fnname_from_decl (current_function_decl);
3193   rtx personality = get_personality_function (current_function_decl);
3194 
3195   /* Not all functions need anything.  */
3196   if (!crtl->uses_eh_lsda
3197       || targetm_common.except_unwind_info (&global_options) == UI_NONE)
3198     return;
3199 
3200   /* No need to emit any boilerplate stuff for the cold part.  */
3201   if (section == 1 && !crtl->eh.call_site_record_v[1])
3202     return;
3203 
3204   if (personality)
3205     {
3206       assemble_external_libcall (personality);
3207 
3208       if (targetm.asm_out.emit_except_personality)
3209 	targetm.asm_out.emit_except_personality (personality);
3210     }
3211 
3212   switch_to_exception_section (fnname);
3213 
3214   /* If the target wants a label to begin the table, emit it here.  */
3215   targetm.asm_out.emit_except_table_label (asm_out_file);
3216 
3217   /* Do the real work.  */
3218   output_one_function_exception_table (section);
3219 
3220   switch_to_section (current_function_section ());
3221 }
3222 
3223 void
3224 set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
3225 {
3226   fun->eh->throw_stmt_table = table;
3227 }
3228 
3229 hash_map<gimple *, int> *
3230 get_eh_throw_stmt_table (struct function *fun)
3231 {
3232   return fun->eh->throw_stmt_table;
3233 }
3234 
3235 /* Determine if the function needs an EH personality function.  */
3236 
3237 enum eh_personality_kind
3238 function_needs_eh_personality (struct function *fn)
3239 {
3240   enum eh_personality_kind kind = eh_personality_none;
3241   eh_region i;
3242 
3243   FOR_ALL_EH_REGION_FN (i, fn)
3244     {
3245       switch (i->type)
3246 	{
3247 	case ERT_CLEANUP:
3248 	  /* Can do with any personality including the generic C one.  */
3249 	  kind = eh_personality_any;
3250 	  break;
3251 
3252 	case ERT_TRY:
3253 	case ERT_ALLOWED_EXCEPTIONS:
3254 	  /* Always needs an EH personality function.  The generic C
3255 	     personality doesn't handle these even for empty type lists.  */
3256 	  return eh_personality_lang;
3257 
3258 	case ERT_MUST_NOT_THROW:
3259 	  /* Always needs an EH personality function.  The language may
3260 	     specify which abort routine must be used, e.g. std::terminate.  */
3261 	  return eh_personality_lang;
3262 	}
3263     }
3264 
3265   return kind;
3266 }
3267 
3268 /* Dump EH information to OUT.  */
3269 
3270 void
3271 dump_eh_tree (FILE * out, struct function *fun)
3272 {
3273   eh_region i;
3274   int depth = 0;
3275   static const char *const type_name[] = {
3276     "cleanup", "try", "allowed_exceptions", "must_not_throw"
3277   };
3278 
3279   i = fun->eh->region_tree;
3280   if (!i)
3281     return;
3282 
3283   fprintf (out, "Eh tree:\n");
3284   while (1)
3285     {
3286       fprintf (out, "  %*s %i %s", depth * 2, "",
3287 	       i->index, type_name[(int) i->type]);
3288 
3289       if (i->landing_pads)
3290 	{
3291 	  eh_landing_pad lp;
3292 
3293 	  fprintf (out, " land:");
3294 	  if (current_ir_type () == IR_GIMPLE)
3295 	    {
3296 	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3297 		{
3298 		  fprintf (out, "{%i,", lp->index);
3299 		  print_generic_expr (out, lp->post_landing_pad);
3300 		  fputc ('}', out);
3301 		  if (lp->next_lp)
3302 		    fputc (',', out);
3303 		}
3304 	    }
3305 	  else
3306 	    {
3307 	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3308 		{
3309 		  fprintf (out, "{%i,", lp->index);
3310 		  if (lp->landing_pad)
3311 		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3312 			     NOTE_P (lp->landing_pad) ? "(del)" : "");
3313 		  else
3314 		    fprintf (out, "(nil),");
3315 		  if (lp->post_landing_pad)
3316 		    {
3317 		      rtx_insn *lab = label_rtx (lp->post_landing_pad);
3318 		      fprintf (out, "%i%s}", INSN_UID (lab),
3319 			       NOTE_P (lab) ? "(del)" : "");
3320 		    }
3321 		  else
3322 		    fprintf (out, "(nil)}");
3323 		  if (lp->next_lp)
3324 		    fputc (',', out);
3325 		}
3326 	    }
3327 	}
3328 
3329       switch (i->type)
3330 	{
3331 	case ERT_CLEANUP:
3332 	case ERT_MUST_NOT_THROW:
3333 	  break;
3334 
3335 	case ERT_TRY:
3336 	  {
3337 	    eh_catch c;
3338 	    fprintf (out, " catch:");
3339 	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3340 	      {
3341 		fputc ('{', out);
3342 		if (c->label)
3343 		  {
3344 		    fprintf (out, "lab:");
3345 		    print_generic_expr (out, c->label);
3346 		    fputc (';', out);
3347 		  }
3348 		print_generic_expr (out, c->type_list);
3349 		fputc ('}', out);
3350 		if (c->next_catch)
3351 		  fputc (',', out);
3352 	      }
3353 	  }
3354 	  break;
3355 
3356 	case ERT_ALLOWED_EXCEPTIONS:
3357 	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
3358 	  print_generic_expr (out, i->u.allowed.type_list);
3359 	  break;
3360 	}
3361       fputc ('\n', out);
3362 
3363       /* If there are sub-regions, process them.  */
3364       if (i->inner)
3365 	i = i->inner, depth++;
3366       /* If there are peers, process them.  */
3367       else if (i->next_peer)
3368 	i = i->next_peer;
3369       /* Otherwise, step back up the tree to the next peer.  */
3370       else
3371 	{
3372 	  do
3373 	    {
3374 	      i = i->outer;
3375 	      depth--;
3376 	      if (i == NULL)
3377 		return;
3378 	    }
3379 	  while (i->next_peer == NULL);
3380 	  i = i->next_peer;
3381 	}
3382     }
3383 }
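
/* Schematic example of the dump format produced above, for the GIMPLE
   IR case; the exact label and type text comes from print_generic_expr
   and will vary.  One line per region, indented by nesting depth:

     Eh tree:
        1 cleanup land:{1,<L2>}
          2 try land:{2,<L3>} catch:{lab:<L4>;struct Err *}
          3 must_not_throw
*/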

/* Dump the EH tree for FN on stderr.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
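
/* DEBUG_FUNCTION keeps this callable from a debugger even though nothing
   in the compiler calls it directly; a typical session (illustrative):

     (gdb) call debug_eh_tree (cfun)
*/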

/* Verify invariants on EH data structures.  */

DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  /* Count the regions in region_array, checking that each is stored
     at the slot matching its own index.  */
  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("region_array is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Likewise for the landing pads in lp_array.  */
  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("lp_array is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Walk the region tree, checking that each region and landing pad
     is consistent with the arrays and with its parent links.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
	{
	  error ("region_array is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if ((*fun->eh->lp_array)[lp->index] != lp)
	    {
	      error ("lp_array is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      /* Advance in the same pre-order fashion as dump_eh_tree.  */
      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  /* The walk must end back at depth zero, and every region and landing
     pad recorded in the arrays must have been reached by it.  */
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}
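
/* Illustrative only: like debug_eh_tree, this is also handy by hand
   from a debugger when chasing EH corruption; on failure it dumps the
   tree to stderr and aborts via internal_error.

     (gdb) call verify_eh_tree (cfun)
*/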

#include "gt-except.h"