/* Implements exception handling.
   Copyright (C) 1989-2020 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, EH_ELSE_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered to the
   GIMPLE_TRY, GIMPLE_CATCH, GIMPLE_EH_ELSE, and GIMPLE_EH_FILTER
   nodes.  The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are
   converted into GIMPLE_TRY_FINALLY nodes; the others are a more
   direct 1-1 conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
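
/* As a concrete illustration of the pipeline above (a hand-written
   sketch with made-up names, not a dump of any actual compiler
   output), consider a C++ function such as:

	void f ()
	{
	  Resource r;		// destructor -> an ERT_CLEANUP region
	  try {
	    may_throw ();	// throwing call -> THROW_STMT_TABLE entry
	  } catch (Error &e) {	// handler -> an ERT_TRY region + eh_catch
	    handle (e);
	  }
	}

   pass_lower_eh records the cleanup and try regions in the EH_REGION
   tree, creates an EH_LANDING_PAD for each region that has handler
   code in this function, and emits a GIMPLE_EH_DISPATCH for the catch;
   pass_lower_eh_dispatch later expands that dispatch into a comparison
   of the runtime-provided __builtin_eh_filter value against the filter
   value that assign_filter_values gave to Error's runtime type.  */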


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "calls.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "tree-pretty-print.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-hash-traits.h"

static GTY(()) int call_site_base;

static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;

static GTY(()) tree setjmp_fn;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;


struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */
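
/* For example, an insn within a try region catching type A that is
   itself nested within a try region catching type B gets a chain of
   two action records: the first holds A's filter value, its "next"
   field points at the record holding B's filter value, and that
   record's "next" of 0 terminates the chain.  (A sketch of the
   encoding; the chains are actually built by
   collect_one_action_chain, below.)  */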

struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
			     const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;

static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);


void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
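  /* For reference, a sketch of the runtime structure being mirrored
     (the authoritative definition lives in unwind-sjlj.c and may
     differ in detail from this approximation):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  _Unwind_Word data[4];
	  _Unwind_Personality_Fn personality;
	  void *lsda;
	  ...target-dependent jump buffer...
	};  */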
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);

#ifdef DONT_USE_BUILTIN_SETJMP
      tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
				      NULL);
      setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
			      get_identifier ("setjmp"), tmp);
      TREE_PUBLIC (setjmp_fn) = 1;
      DECL_EXTERNAL (setjmp_fn) = 1;
      DECL_ASSEMBLER_NAME (setjmp_fn);
#endif
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure the zeroth entries are used; index 0 is reserved to
     mean "no region" / "no landing pad".  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list, so that further
     processing is normalized; then register each type against the
     runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}

/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	    = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */
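/* For example, if REGION_A is nested somewhere within REGION_B, the
   result is REGION_B itself; for regions in disjoint subtrees, it is
   the closest region enclosing both, or NULL if no such region
   exists.  */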

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  auto_sbitmap b_outer (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }
  while (region_a);

  return region_a;
}

void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}


/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					  INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
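
/* For example, on non-ARM-EABI targets an exception specification
   permitting types A and B is encoded by appending uleb128 (filter of
   A), uleb128 (filter of B) and a terminating 0 byte to
   ehspec_data.other, and the value returned for the specification is
   minus the 1-based offset of the first of those bytes, as the code
   above shows.  */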

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */
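
/* For instance, if two distinct try blocks in a function both catch
   the same type, both handlers receive the same filter value for it,
   because add_ttypes_entry hashes on the type and reuses the existing
   ttype_data slot.  */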

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}

/* Emit SEQ into a new basic block just before INSN (which is assumed
   to be the first instruction of some existing BB) and return the
   newly produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *next, *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);

  /* Make sure to put the location of INSN or a subsequent instruction on SEQ
     to avoid inheriting the location of the previous instruction.  */
  next = insn;
  while (next && !NONDEBUG_INSN_P (next))
    next = NEXT_INSN (next);
  if (next)
    last = emit_insn_before_setloc (seq, insn, INSN_LOCATION (next));
  else
    last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */
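
/* (On x86-64, for example, EH_RETURN_DATA_REGNO (0) and (1) designate
   the ax and dx registers, so the moves below copy the exception
   pointer and filter value that the unwinder hands over in those hard
   registers into the pseudos the rest of the function uses.)  */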

static void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      bb->count = bb->next_bb->count;
      make_single_succ_edge (bb, bb->next_bb, e_flags);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}


static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   this handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label,
			       profile_probability::unlikely ());
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

#ifdef DONT_USE_BUILTIN_SETJMP
  if (dispatch_label)
    {
      /* The sequence contains a branch in the middle so we need to force
	 the creation of a new basic block by means of BB_SUPERBLOCK.  */
      if (fn_begin_outside_block)
	{
	  basic_block bb
	    = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  if (JUMP_P (BB_END (bb)))
	    emit_insn_before (seq, BB_END (bb));
	  else
	    emit_insn_after (seq, BB_END (bb));
	}
      else
	emit_insn_after (seq, fn_begin);

      single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
      return;
    }
#endif

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
  scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	if (current_loops)
	  {
	    class loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ???  For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at function begin, simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  break_superblocks ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();
}

/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}
1634 /* Worker for remove_unreachable_eh_regions.
1635    PP is a pointer to the region to start a region tree depth-first
1636    search from.  R_REACHABLE is the set of regions that have to be
1637    preserved.  */
1638 
1639 static void
remove_unreachable_eh_regions_worker(eh_region * pp,sbitmap r_reachable)1640 remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1641 {
1642   while (*pp)
1643     {
1644       eh_region region = *pp;
1645       remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1646       if (!bitmap_bit_p (r_reachable, region->index))
1647 	remove_eh_handler_splicer (pp);
1648       else
1649 	pp = &region->next_peer;
1650     }
1651 }
1652 
1653 /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1654    Do this by traversing the EH tree top-down and splice out regions that
1655    are not marked.  By removing regions from the leaves, we avoid costly
1656    searches in the region tree.  */
1657 
1658 void
1659 remove_unreachable_eh_regions (sbitmap r_reachable)
1660 {
1661   remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
1662 }
1663 
1664 /* Invokes CALLBACK for every exception handler landing pad label.
1665    Only used by reload hackery; should not be used by new code.  */
1666 
1667 void
1668 for_each_eh_label (void (*callback) (rtx))
1669 {
1670   eh_landing_pad lp;
1671   int i;
1672 
1673   for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1674     {
1675       if (lp)
1676 	{
1677 	  rtx_code_label *lab = lp->landing_pad;
1678 	  if (lab && LABEL_P (lab))
1679 	    (*callback) (lab);
1680 	}
1681     }
1682 }
1683 
1684 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1685    call insn.
1686 
1687    At the gimple level, we use LP_NR
1688        > 0 : The statement transfers to landing pad LP_NR
1689        = 0 : The statement is outside any EH region
1690        < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1691 
1692    At the rtl level, we use LP_NR
1693        > 0 : The insn transfers to landing pad LP_NR
1694        = 0 : The insn cannot throw
1695        < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1696        = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1697        missing note: The insn is outside any EH region.
1698 
1699   ??? This difference probably ought to be avoided.  We could stand
1700   to record nothrow for arbitrary gimple statements, and so avoid
1701   some moderately complex lookups in stmt_could_throw_p.  Perhaps
1702   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
1703   no-nonlocal-goto property should be recorded elsewhere as a bit
1704   on the call_insn directly.  Perhaps we should make more use of
1705   attaching the trees to call_insns (reachable via symbol_ref in
1706   direct call cases) and just pull the data out of the trees.  */
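
/* An illustrative sketch of the rtl-level encoding (hypothetical
   insns; the values are just the cases enumerated above):

     (call_insn ... foo)  REG_EH_REGION 3        may throw, landing pad 3
     (call_insn ... bar)  REG_EH_REGION -2       in MUST_NOT_THROW region 2
     (call_insn ... baz)  REG_EH_REGION 0        cannot throw
     (call_insn ... qux)  REG_EH_REGION INT_MIN  cannot throw or nonlocal-goto
     (insn ... set)       no note                outside any EH region  */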
1707 
1708 void
1709 make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
1710 {
1711   rtx value;
1712   if (ecf_flags & ECF_NOTHROW)
1713     value = const0_rtx;
1714   else if (lp_nr != 0)
1715     value = GEN_INT (lp_nr);
1716   else
1717     return;
1718   add_reg_note (insn, REG_EH_REGION, value);
1719 }
1720 
1721 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1722    nor perform a non-local goto.  Replace the region note if it
1723    already exists.  */
1724 
1725 void
1726 make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
1727 {
1728   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1729   rtx intmin = GEN_INT (INT_MIN);
1730 
1731   if (note != 0)
1732     XEXP (note, 0) = intmin;
1733   else
1734     add_reg_note (insn, REG_EH_REGION, intmin);
1735 }
1736 
1737 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1738    to the contrary.  */
1739 
1740 bool
1741 insn_could_throw_p (const_rtx insn)
1742 {
1743   if (!flag_exceptions)
1744     return false;
1745   if (CALL_P (insn))
1746     return true;
1747   if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1748     return may_trap_p (PATTERN (insn));
1749   return false;
1750 }
1751 
1752 /* Copy a REG_EH_REGION note to each insn that might throw beginning
1753    at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
1754    to look for a note, or the note itself.  */
1755 
1756 void
1757 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1758 {
1759   rtx_insn *insn;
1760   rtx note = note_or_insn;
1761 
1762   if (INSN_P (note_or_insn))
1763     {
1764       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1765       if (note == NULL)
1766 	return;
1767     }
1768   else if (is_a <rtx_insn *> (note_or_insn))
1769     return;
1770   note = XEXP (note, 0);
1771 
1772   for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1773     if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1774         && insn_could_throw_p (insn))
1775       add_reg_note (insn, REG_EH_REGION, note);
1776 }
1777 
1778 /* Likewise, but iterate backward.  */
1779 
1780 void
1781 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1782 {
1783   rtx_insn *insn;
1784   rtx note = note_or_insn;
1785 
1786   if (INSN_P (note_or_insn))
1787     {
1788       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1789       if (note == NULL)
1790 	return;
1791     }
1792   else if (is_a <rtx_insn *> (note_or_insn))
1793     return;
1794   note = XEXP (note, 0);
1795 
1796   for (insn = last; insn != first; insn = PREV_INSN (insn))
1797     if (insn_could_throw_p (insn))
1798       add_reg_note (insn, REG_EH_REGION, note);
1799 }
1800 
1801 
1802 /* Extract all EH information from INSN.  Return true if the insn
1803    was marked NOTHROW.  */
1804 
1805 static bool
1806 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1807 			       eh_landing_pad *plp)
1808 {
1809   eh_landing_pad lp = NULL;
1810   eh_region r = NULL;
1811   bool ret = false;
1812   rtx note;
1813   int lp_nr;
1814 
1815   if (! INSN_P (insn))
1816     goto egress;
1817 
1818   if (NONJUMP_INSN_P (insn)
1819       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1820     insn = XVECEXP (PATTERN (insn), 0, 0);
1821 
1822   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1823   if (!note)
1824     {
1825       ret = !insn_could_throw_p (insn);
1826       goto egress;
1827     }
1828 
1829   lp_nr = INTVAL (XEXP (note, 0));
1830   if (lp_nr == 0 || lp_nr == INT_MIN)
1831     {
1832       ret = true;
1833       goto egress;
1834     }
1835 
1836   if (lp_nr < 0)
1837     r = (*cfun->eh->region_array)[-lp_nr];
1838   else
1839     {
1840       lp = (*cfun->eh->lp_array)[lp_nr];
1841       r = lp->region;
1842     }
1843 
1844  egress:
1845   *plp = lp;
1846   *pr = r;
1847   return ret;
1848 }
1849 
1850 /* Return the landing pad to which INSN may go, or NULL if it does not
1851    have a reachable landing pad within this function.  */
1852 
1853 eh_landing_pad
1854 get_eh_landing_pad_from_rtx (const_rtx insn)
1855 {
1856   eh_landing_pad lp;
1857   eh_region r;
1858 
1859   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1860   return lp;
1861 }
1862 
1863 /* Return the region to which INSN may go, or NULL if it does not
1864    have a reachable region within this function.  */
1865 
1866 eh_region
1867 get_eh_region_from_rtx (const_rtx insn)
1868 {
1869   eh_landing_pad lp;
1870   eh_region r;
1871 
1872   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1873   return r;
1874 }
1875 
1876 /* Return true if INSN throws and is caught by something in this function.  */
1877 
1878 bool
1879 can_throw_internal (const_rtx insn)
1880 {
1881   return get_eh_landing_pad_from_rtx (insn) != NULL;
1882 }
1883 
1884 /* Return true if INSN throws and escapes from the current function.  */
1885 
1886 bool
1887 can_throw_external (const_rtx insn)
1888 {
1889   eh_landing_pad lp;
1890   eh_region r;
1891   bool nothrow;
1892 
1893   if (! INSN_P (insn))
1894     return false;
1895 
1896   if (NONJUMP_INSN_P (insn)
1897       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1898     {
1899       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1900       int i, n = seq->len ();
1901 
1902       for (i = 0; i < n; i++)
1903 	if (can_throw_external (seq->element (i)))
1904 	  return true;
1905 
1906       return false;
1907     }
1908 
1909   nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1910 
1911   /* If we can't throw, we obviously can't throw external.  */
1912   if (nothrow)
1913     return false;
1914 
1915   /* If we have an internal landing pad, then we're not external.  */
1916   if (lp != NULL)
1917     return false;
1918 
1919   /* If we're not within an EH region, then we are external.  */
1920   if (r == NULL)
1921     return true;
1922 
1923   /* The only thing that ought to be left is MUST_NOT_THROW regions,
1924      which don't always have landing pads.  */
1925   gcc_assert (r->type == ERT_MUST_NOT_THROW);
1926   return false;
1927 }
1928 
1929 /* Return true if INSN cannot throw at all.  */
1930 
1931 bool
1932 insn_nothrow_p (const_rtx insn)
1933 {
1934   eh_landing_pad lp;
1935   eh_region r;
1936 
1937   if (! INSN_P (insn))
1938     return true;
1939 
1940   if (NONJUMP_INSN_P (insn)
1941       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1942     {
1943       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1944       int i, n = seq->len ();
1945 
1946       for (i = 0; i < n; i++)
1947 	if (!insn_nothrow_p (seq->element (i)))
1948 	  return false;
1949 
1950       return true;
1951     }
1952 
1953   return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1954 }
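
/* A reading aid for the predicates above (no new semantics; the rows
   are the REG_EH_REGION encodings of a potentially-trapping insn):

     note value             can_throw_internal   can_throw_external
     > 0 (landing pad)      true                 false
     < 0 (must-not-throw)   false                false
     0 or INT_MIN           false                false
     missing note           false                true  */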
1955 
1956 /* Return true if INSN can perform a non-local goto.  */
1957 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */
1958 
1959 bool
1960 can_nonlocal_goto (const rtx_insn *insn)
1961 {
1962   if (nonlocal_goto_handler_labels && CALL_P (insn))
1963     {
1964       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1965       if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1966 	return true;
1967     }
1968   return false;
1969 }
1970 
1971 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */
1972 
1973 static unsigned int
1974 set_nothrow_function_flags (void)
1975 {
1976   rtx_insn *insn;
1977 
1978   crtl->nothrow = 1;
1979 
1980   /* Assume crtl->all_throwers_are_sibcalls until we encounter
1981      something that can throw an exception.  We specifically exempt
1982      CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1983      and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
1984      is optimistic.  */
1985 
1986   crtl->all_throwers_are_sibcalls = 1;
1987 
1988   /* If we don't know that this implementation of the function will
1989      actually be used, then we must not set TREE_NOTHROW, since
1990      callers must not assume that this function does not throw.  */
1991   if (TREE_NOTHROW (current_function_decl))
1992     return 0;
1993 
1994   if (! flag_exceptions)
1995     return 0;
1996 
1997   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1998     if (can_throw_external (insn))
1999       {
2000         crtl->nothrow = 0;
2001 
2002 	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2003 	  {
2004 	    crtl->all_throwers_are_sibcalls = 0;
2005 	    return 0;
2006 	  }
2007       }
2008 
2009   if (crtl->nothrow
2010       && (cgraph_node::get (current_function_decl)->get_availability ()
2011           >= AVAIL_AVAILABLE))
2012     {
2013       struct cgraph_node *node = cgraph_node::get (current_function_decl);
2014       struct cgraph_edge *e;
2015       for (e = node->callers; e; e = e->next_caller)
2016         e->can_throw_external = false;
2017       node->set_nothrow_flag (true);
2018 
2019       if (dump_file)
2020 	fprintf (dump_file, "Marking function nothrow: %s\n\n",
2021 		 current_function_name ());
2022     }
2023   return 0;
2024 }
2025 
2026 namespace {
2027 
2028 const pass_data pass_data_set_nothrow_function_flags =
2029 {
2030   RTL_PASS, /* type */
2031   "nothrow", /* name */
2032   OPTGROUP_NONE, /* optinfo_flags */
2033   TV_NONE, /* tv_id */
2034   0, /* properties_required */
2035   0, /* properties_provided */
2036   0, /* properties_destroyed */
2037   0, /* todo_flags_start */
2038   0, /* todo_flags_finish */
2039 };
2040 
2041 class pass_set_nothrow_function_flags : public rtl_opt_pass
2042 {
2043 public:
2044   pass_set_nothrow_function_flags (gcc::context *ctxt)
2045     : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2046   {}
2047 
2048   /* opt_pass methods: */
2049   virtual unsigned int execute (function *)
2050     {
2051       return set_nothrow_function_flags ();
2052     }
2053 
2054 }; // class pass_set_nothrow_function_flags
2055 
2056 } // anon namespace
2057 
2058 rtl_opt_pass *
2059 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2060 {
2061   return new pass_set_nothrow_function_flags (ctxt);
2062 }
2063 
2064 
2065 /* Various hooks for unwind library.  */
2066 
2067 /* Expand the EH support builtin functions:
2068    __builtin_eh_pointer and __builtin_eh_filter.  */
2069 
2070 static eh_region
2071 expand_builtin_eh_common (tree region_nr_t)
2072 {
2073   HOST_WIDE_INT region_nr;
2074   eh_region region;
2075 
2076   gcc_assert (tree_fits_shwi_p (region_nr_t));
2077   region_nr = tree_to_shwi (region_nr_t);
2078 
2079   region = (*cfun->eh->region_array)[region_nr];
2080 
2081   /* ??? We shouldn't have been able to delete an EH region without
2082      deleting all the code that depended on it.  */
2083   gcc_assert (region != NULL);
2084 
2085   return region;
2086 }
2087 
2088 /* Expand to the exc_ptr value from the given eh region.  */
2089 
2090 rtx
2091 expand_builtin_eh_pointer (tree exp)
2092 {
2093   eh_region region
2094     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2095   if (region->exc_ptr_reg == NULL)
2096     region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2097   return region->exc_ptr_reg;
2098 }
2099 
2100 /* Expand to the filter value from the given eh region.  */
2101 
2102 rtx
2103 expand_builtin_eh_filter (tree exp)
2104 {
2105   eh_region region
2106     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2107   if (region->filter_reg == NULL)
2108     region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2109   return region->filter_reg;
2110 }
2111 
2112 /* Copy the exc_ptr and filter values from one landing pad's registers
2113    to another.  This is used to inline the resx statement.  */
2114 
2115 rtx
2116 expand_builtin_eh_copy_values (tree exp)
2117 {
2118   eh_region dst
2119     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2120   eh_region src
2121     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2122   scalar_int_mode fmode = targetm.eh_return_filter_mode ();
2123 
2124   if (dst->exc_ptr_reg == NULL)
2125     dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2126   if (src->exc_ptr_reg == NULL)
2127     src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2128 
2129   if (dst->filter_reg == NULL)
2130     dst->filter_reg = gen_reg_rtx (fmode);
2131   if (src->filter_reg == NULL)
2132     src->filter_reg = gen_reg_rtx (fmode);
2133 
2134   emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2135   emit_move_insn (dst->filter_reg, src->filter_reg);
2136 
2137   return const0_rtx;
2138 }
2139 
2140 /* Do any necessary initialization to access arbitrary stack frames.
2141    On the SPARC, this means flushing the register windows.  */
2142 
2143 void
2144 expand_builtin_unwind_init (void)
2145 {
2146   /* Set this so all the registers get saved in our frame; we need to be
2147      able to copy the saved values for any registers from frames we unwind.  */
2148   crtl->saves_all_registers = 1;
2149 
2150   SETUP_FRAME_ADDRESSES ();
2151 }
2152 
2153 /* Map a non-negative number to an eh return data register number; expands
2154    to -1 if no return data register is associated with the input number.
2155    At least the inputs 0 and 1 must be mapped; the target may provide more.  */
2156 
2157 rtx
2158 expand_builtin_eh_return_data_regno (tree exp)
2159 {
2160   tree which = CALL_EXPR_ARG (exp, 0);
2161   unsigned HOST_WIDE_INT iwhich;
2162 
2163   if (TREE_CODE (which) != INTEGER_CST)
2164     {
2165       error ("argument of %<__builtin_eh_return_regno%> must be constant");
2166       return constm1_rtx;
2167     }
2168 
2169   iwhich = tree_to_uhwi (which);
2170   iwhich = EH_RETURN_DATA_REGNO (iwhich);
2171   if (iwhich == INVALID_REGNUM)
2172     return constm1_rtx;
2173 
2174 #ifdef DWARF_FRAME_REGNUM
2175   iwhich = DWARF_FRAME_REGNUM (iwhich);
2176 #else
2177   iwhich = DBX_REGISTER_NUMBER (iwhich);
2178 #endif
2179 
2180   return GEN_INT (iwhich);
2181 }
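
/* Usage sketch: personality routines built on the _Unwind_* interface
   typically hand the exception object and filter value to the landing
   pad through the first two EH return data registers, along the lines
   of (simplified; shown only as an assumption about the callers of
   this builtin, not code from this file):

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		    (_Unwind_Ptr) exception_header);
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
		    handler_switch_value);  */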
2182 
2183 /* Given a value extracted from the return address register or stack slot,
2184    return the actual address encoded in that value.  */
2185 
2186 rtx
2187 expand_builtin_extract_return_addr (tree addr_tree)
2188 {
2189   rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2190 
2191   if (GET_MODE (addr) != Pmode
2192       && GET_MODE (addr) != VOIDmode)
2193     {
2194 #ifdef POINTERS_EXTEND_UNSIGNED
2195       addr = convert_memory_address (Pmode, addr);
2196 #else
2197       addr = convert_to_mode (Pmode, addr, 0);
2198 #endif
2199     }
2200 
2201   /* First mask out any unwanted bits.  */
2202   rtx mask = MASK_RETURN_ADDR;
2203   if (mask)
2204     expand_and (Pmode, addr, mask, addr);
2205 
2206   /* Then adjust to find the real return address.  */
2207   if (RETURN_ADDR_OFFSET)
2208     addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2209 
2210   return addr;
2211 }
2212 
2213 /* Given an actual address in addr_tree, do any necessary encoding
2214    and return the value to be stored in the return address register or
2215    stack slot so the epilogue will return to that address.  */
2216 
2217 rtx
2218 expand_builtin_frob_return_addr (tree addr_tree)
2219 {
2220   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2221 
2222   addr = convert_memory_address (Pmode, addr);
2223 
2224   if (RETURN_ADDR_OFFSET)
2225     {
2226       addr = force_reg (Pmode, addr);
2227       addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2228     }
2229 
2230   return addr;
2231 }
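
/* Worked sketch for the pair of functions above, on a hypothetical
   target (assumed macro values, not any real port) with
   MASK_RETURN_ADDR == GEN_INT (-4) and RETURN_ADDR_OFFSET == 8:

     extract: raw 0x100e  ->  (0x100e & -4) + 8 == 0x1014
     frob:    0x1014      ->  0x1014 - 8       == 0x100c

   so frob undoes the offset adjustment that extract applies.  */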
2232 
2233 /* Set up the epilogue with the magic bits we'll need to return to the
2234    exception handler.  */
2235 
2236 void
2237 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2238 			  tree handler_tree)
2239 {
2240   rtx tmp;
2241 
2242 #ifdef EH_RETURN_STACKADJ_RTX
2243   tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2244 		     VOIDmode, EXPAND_NORMAL);
2245   tmp = convert_memory_address (Pmode, tmp);
2246   if (!crtl->eh.ehr_stackadj)
2247     crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
2248   else if (tmp != crtl->eh.ehr_stackadj)
2249     emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2250 #endif
2251 
2252   tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2253 		     VOIDmode, EXPAND_NORMAL);
2254   tmp = convert_memory_address (Pmode, tmp);
2255   if (!crtl->eh.ehr_handler)
2256     crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
2257   else if (tmp != crtl->eh.ehr_handler)
2258     emit_move_insn (crtl->eh.ehr_handler, tmp);
2259 
2260   if (!crtl->eh.ehr_label)
2261     crtl->eh.ehr_label = gen_label_rtx ();
2262   emit_jump (crtl->eh.ehr_label);
2263 }
2264 
2265 /* Expand __builtin_eh_return.  This exit path from the function loads up
2266    the eh return data registers, adjusts the stack, and branches to a
2267    given PC other than the normal return address.  */
2268 
2269 void
2270 expand_eh_return (void)
2271 {
2272   rtx_code_label *around_label;
2273 
2274   if (! crtl->eh.ehr_label)
2275     return;
2276 
2277   crtl->calls_eh_return = 1;
2278 
2279 #ifdef EH_RETURN_STACKADJ_RTX
2280   emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2281 #endif
2282 
2283   around_label = gen_label_rtx ();
2284   emit_jump (around_label);
2285 
2286   emit_label (crtl->eh.ehr_label);
2287   clobber_return_register ();
2288 
2289 #ifdef EH_RETURN_STACKADJ_RTX
2290   emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2291 #endif
2292 
2293   if (targetm.have_eh_return ())
2294     emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
2295   else
2296     {
2297       if (rtx handler = EH_RETURN_HANDLER_RTX)
2298 	emit_move_insn (handler, crtl->eh.ehr_handler);
2299       else
2300 	error ("%<__builtin_eh_return%> not supported on this target");
2301     }
2302 
2303   emit_label (around_label);
2304 }
2305 
2306 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2307    POINTERS_EXTEND_UNSIGNED and return it.  */
2308 
2309 rtx
2310 expand_builtin_extend_pointer (tree addr_tree)
2311 {
2312   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2313   int extend;
2314 
2315 #ifdef POINTERS_EXTEND_UNSIGNED
2316   extend = POINTERS_EXTEND_UNSIGNED;
2317 #else
2318   /* The previous EH code did an unsigned extend by default, so we do this also
2319      for consistency.  */
2320   extend = 1;
2321 #endif
2322 
2323   return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2324 }
2325 
2326 static int
2327 add_action_record (action_hash_type *ar_hash, int filter, int next)
2328 {
2329   struct action_record **slot, *new_ar, tmp;
2330 
2331   tmp.filter = filter;
2332   tmp.next = next;
2333   slot = ar_hash->find_slot (&tmp, INSERT);
2334 
2335   if ((new_ar = *slot) == NULL)
2336     {
2337       new_ar = XNEW (struct action_record);
2338       new_ar->offset = crtl->eh.action_record_data->length () + 1;
2339       new_ar->filter = filter;
2340       new_ar->next = next;
2341       *slot = new_ar;
2342 
2343       /* The filter value goes in untouched.  The link to the next
2344 	 record is a "self-relative" byte offset, or zero to indicate
2345 	 that there is no next record.  So convert the absolute 1-based
2346 	 indices we've been carrying around into a displacement.  */
2347 
2348       push_sleb128 (&crtl->eh.action_record_data, filter);
2349       if (next)
2350 	next -= crtl->eh.action_record_data->length () + 1;
2351       push_sleb128 (&crtl->eh.action_record_data, next);
2352     }
2353 
2354   return new_ar->offset;
2355 }
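
/* A small worked example of the self-relative link (hypothetical
   filter values): the first record added has offset 1 and pushes two
   bytes, say <filter 2, next 0>.  A second record chaining to it gets
   offset 3; after its filter byte is pushed the vector holds three
   bytes, so the stored link is 1 - (3 + 1) == -3, a displacement the
   runtime follows backwards to reach the first record.  */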
2356 
2357 static int
2358 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2359 {
2360   int next;
2361 
2362   /* If we've reached the top of the region chain, then we have
2363      no actions, and require no landing pad.  */
2364   if (region == NULL)
2365     return -1;
2366 
2367   switch (region->type)
2368     {
2369     case ERT_CLEANUP:
2370       {
2371 	eh_region r;
2372 	/* A cleanup adds a zero filter to the beginning of the chain, but
2373 	   there are special cases to look out for.  If there are *only*
2374 	   cleanups along a path, then it compresses to a zero action.
2375 	   Further, if there are multiple cleanups along a path, we only
2376 	   need to represent one of them, as that is enough to trigger
2377 	   entry to the landing pad at runtime.  */
2378 	next = collect_one_action_chain (ar_hash, region->outer);
2379 	if (next <= 0)
2380 	  return 0;
2381 	for (r = region->outer; r ; r = r->outer)
2382 	  if (r->type == ERT_CLEANUP)
2383 	    return next;
2384 	return add_action_record (ar_hash, 0, next);
2385       }
2386 
2387     case ERT_TRY:
2388       {
2389 	eh_catch c;
2390 
2391 	/* Process the associated catch regions in reverse order.
2392 	   If there's a catch-all handler, then we don't need to
2393 	   search outer regions.  Use a magic -3 value to record
2394 	   that we haven't done the outer search.  */
2395 	next = -3;
2396 	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2397 	  {
2398 	    if (c->type_list == NULL)
2399 	      {
2400 		/* Retrieve the filter from the head of the filter list
2401 		   where we have stored it (see assign_filter_values).  */
2402 		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2403 		next = add_action_record (ar_hash, filter, 0);
2404 	      }
2405 	    else
2406 	      {
2407 		/* Once the outer search is done, trigger an action record for
2408 		   each filter we have.  */
2409 		tree flt_node;
2410 
2411 		if (next == -3)
2412 		  {
2413 		    next = collect_one_action_chain (ar_hash, region->outer);
2414 
2415 		    /* If there is no next action, terminate the chain.  */
2416 		    if (next == -1)
2417 		      next = 0;
2418 		    /* If all outer actions are cleanups or must_not_throw,
2419 		       we'll have no action record for it, since we had wanted
2420 		       to encode these states in the call-site record directly.
2421 		       Add a cleanup action to the chain to catch these.  */
2422 		    else if (next <= 0)
2423 		      next = add_action_record (ar_hash, 0, 0);
2424 		  }
2425 
2426 		flt_node = c->filter_list;
2427 		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2428 		  {
2429 		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2430 		    next = add_action_record (ar_hash, filter, next);
2431 		  }
2432 	      }
2433 	  }
2434 	return next;
2435       }
2436 
2437     case ERT_ALLOWED_EXCEPTIONS:
2438       /* An exception specification adds its filter to the
2439 	 beginning of the chain.  */
2440       next = collect_one_action_chain (ar_hash, region->outer);
2441 
2442       /* If there is no next action, terminate the chain.  */
2443       if (next == -1)
2444 	next = 0;
2445       /* If all outer actions are cleanups or must_not_throw,
2446 	 we'll have no action record for it, since we had wanted
2447 	 to encode these states in the call-site record directly.
2448 	 Add a cleanup action to the chain to catch these.  */
2449       else if (next <= 0)
2450 	next = add_action_record (ar_hash, 0, 0);
2451 
2452       return add_action_record (ar_hash, region->u.allowed.filter, next);
2453 
2454     case ERT_MUST_NOT_THROW:
2455       /* A must-not-throw region with no inner handlers or cleanups
2456 	 requires no call-site entry.  Note that this differs from
2457 	 the no handler or cleanup case in that we do require an lsda
2458 	 to be generated.  Return a magic -2 value to record this.  */
2459       return -2;
2460     }
2461 
2462   gcc_unreachable ();
2463 }
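
/* An illustrative trace (hypothetical nesting; filter values as they
   would be handed out by assign_filter_values): for a throw inside

     cleanup
       try { catch (A) -> filter 1;  catch (B) -> filter 2; }

   the ERT_TRY case searches outward once; the cleanup-only outer
   chain compresses to 0, so a zero-filter cleanup record is added
   first, then records for filter 2 and filter 1 are chained on top.
   At runtime the chain is tried as filter 1, filter 2, cleanup.  */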
2464 
2465 static int
2466 add_call_site (rtx landing_pad, int action, int section)
2467 {
2468   call_site_record record;
2469 
2470   record = ggc_alloc<call_site_record_d> ();
2471   record->landing_pad = landing_pad;
2472   record->action = action;
2473 
2474   vec_safe_push (crtl->eh.call_site_record_v[section], record);
2475 
2476   return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2477 }
2478 
2479 static rtx_note *
2480 emit_note_eh_region_end (rtx_insn *insn)
2481 {
2482   return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2483 }
2484 
2485 /* Add a NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section
2486    starts with a landing pad.
2487    With the landing pad at offset 0 from the start label of the section,
2488    we would miss EH delivery because 0 is special and means no landing pad.  */
2489 
2490 static bool
2491 maybe_add_nop_after_section_switch (void)
2492 {
2493   if (!crtl->uses_eh_lsda
2494       || !crtl->eh.call_site_record_v[1])
2495     return false;
2496   int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
2497   hash_set<rtx_insn *> visited;
2498 
2499   for (int i = 0; i < n; ++i)
2500     {
2501       struct call_site_record_d *cs
2502 	 = (*crtl->eh.call_site_record_v[1])[i];
2503       if (cs->landing_pad)
2504 	{
2505 	  rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
2506 	  while (true)
2507 	    {
2508 	      /* Landing pads have the LABEL_PRESERVE_P flag set.  This check
2509 		 makes sure that we do not walk past a landing pad visited
2510 		 earlier, which would result in possible quadratic behaviour.  */
2511 	      if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
2512 		  && visited.add (insn))
2513 		break;
2514 
2515 	      /* Conservatively assume that an ASM insn may be empty.  We
2516 		 have no way to tell what it contains.  */
2517 	      if (active_insn_p (insn)
2518 		  && GET_CODE (PATTERN (insn)) != ASM_INPUT
2519 		  && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
2520 		break;
2521 
2522 	      /* If we reached the start of the hot section, then a NOP
2523 		 will be needed.  */
2524 	      if (GET_CODE (insn) == NOTE
2525 		  && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2526 		{
2527 		  emit_insn_after (gen_nop (), insn);
2528 		  break;
2529 		}
2530 
2531 	      /* We visit only labels from the cold section.  We should never
2532 		 hit the beginning of the insn stream here.  */
2533 	      insn = PREV_INSN (insn);
2534 	    }
2535 	}
2536     }
2537   return false;
2538 }
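
/* Illustration of the problem being patched (a hypothetical cold
   section start):

     NOTE_INSN_SWITCH_TEXT_SECTIONS
     .L_landing_pad_7:		;; at offset 0 from the section start

   Without the NOP the call-site table would record landing-pad
   offset 0, which the unwinder reads as "no landing pad".  */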
2539 
2540 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2541    The new note numbers will not refer to region numbers, but
2542    instead to call site entries.  */
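
/* A sketch of the intended effect (hypothetical insn stream; the
   call-site numbers are whatever add_call_site returns):

     NOTE_INSN_EH_REGION_BEG 1
       call a	;; REG_EH_REGION 3
       call b	;; REG_EH_REGION 3	same action chain and landing
     NOTE_INSN_EH_REGION_END 1		pad, so one shared range
     NOTE_INSN_EH_REGION_BEG 2
       call c	;; REG_EH_REGION 5
     NOTE_INSN_EH_REGION_END 2  */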
2543 
2544 static unsigned int
2545 convert_to_eh_region_ranges (void)
2546 {
2547   rtx insn;
2548   rtx_insn *iter;
2549   rtx_note *note;
2550   action_hash_type ar_hash (31);
2551   int last_action = -3;
2552   rtx_insn *last_action_insn = NULL;
2553   rtx last_landing_pad = NULL_RTX;
2554   rtx_insn *first_no_action_insn = NULL;
2555   int call_site = 0;
2556   int cur_sec = 0;
2557   rtx_insn *section_switch_note = NULL;
2558   rtx_insn *first_no_action_insn_before_switch = NULL;
2559   rtx_insn *last_no_action_insn_before_switch = NULL;
2560   int saved_call_site_base = call_site_base;
2561 
2562   vec_alloc (crtl->eh.action_record_data, 64);
2563 
2564   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2565     if (INSN_P (iter))
2566       {
2567 	eh_landing_pad lp;
2568 	eh_region region;
2569 	bool nothrow;
2570 	int this_action;
2571 	rtx_code_label *this_landing_pad;
2572 
2573 	insn = iter;
2574 	if (NONJUMP_INSN_P (insn)
2575 	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
2576 	  insn = XVECEXP (PATTERN (insn), 0, 0);
2577 
2578 	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2579 	if (nothrow)
2580 	  continue;
2581 	if (region)
2582 	  this_action = collect_one_action_chain (&ar_hash, region);
2583 	else
2584 	  this_action = -1;
2585 
2586 	/* Existence of catch handlers, or must-not-throw regions
2587 	   implies that an lsda is needed (even if empty).  */
2588 	if (this_action != -1)
2589 	  crtl->uses_eh_lsda = 1;
2590 
2591 	/* Delay creation of region notes for no-action regions
2592 	   until we're sure that an lsda will be required.  */
2593 	else if (last_action == -3)
2594 	  {
2595 	    first_no_action_insn = iter;
2596 	    last_action = -1;
2597 	  }
2598 
2599 	if (this_action >= 0)
2600 	  this_landing_pad = lp->landing_pad;
2601 	else
2602 	  this_landing_pad = NULL;
2603 
2604 	/* Differing actions or landing pads implies a change in call-site
2605 	   info, which implies some EH_REGION note should be emitted.  */
2606 	if (last_action != this_action
2607 	    || last_landing_pad != this_landing_pad)
2608 	  {
2609 	    /* If there is a queued no-action region in the other section
2610 	       with hot/cold partitioning, emit it now.  */
2611 	    if (first_no_action_insn_before_switch)
2612 	      {
2613 		gcc_assert (this_action != -1
2614 			    && last_action == (first_no_action_insn
2615 					       ? -1 : -3));
2616 		call_site = add_call_site (NULL_RTX, 0, 0);
2617 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2618 					 first_no_action_insn_before_switch);
2619 		NOTE_EH_HANDLER (note) = call_site;
2620 		note
2621 		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
2622 		NOTE_EH_HANDLER (note) = call_site;
2623 		gcc_assert (last_action != -3
2624 			    || (last_action_insn
2625 				== last_no_action_insn_before_switch));
2626 		first_no_action_insn_before_switch = NULL;
2627 		last_no_action_insn_before_switch = NULL;
2628 		call_site_base++;
2629 	      }
2630 	    /* If we'd not seen a previous action (-3) or the previous
2631 	       action was must-not-throw (-2), then we do not need an
2632 	       end note.  */
2633 	    if (last_action >= -1)
2634 	      {
2635 		/* If we delayed the creation of the begin, do it now.  */
2636 		if (first_no_action_insn)
2637 		  {
2638 		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
2639 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2640 					     first_no_action_insn);
2641 		    NOTE_EH_HANDLER (note) = call_site;
2642 		    first_no_action_insn = NULL;
2643 		  }
2644 
2645 		note = emit_note_eh_region_end (last_action_insn);
2646 		NOTE_EH_HANDLER (note) = call_site;
2647 	      }
2648 
2649 	    /* If the new action is must-not-throw, then no region notes
2650 	       are created.  */
2651 	    if (this_action >= -1)
2652 	      {
2653 		call_site = add_call_site (this_landing_pad,
2654 					   this_action < 0 ? 0 : this_action,
2655 					   cur_sec);
2656 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2657 		NOTE_EH_HANDLER (note) = call_site;
2658 	      }
2659 
2660 	    last_action = this_action;
2661 	    last_landing_pad = this_landing_pad;
2662 	  }
2663 	last_action_insn = iter;
2664       }
2665     else if (NOTE_P (iter)
2666 	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2667       {
2668 	gcc_assert (section_switch_note == NULL_RTX);
2669 	gcc_assert (flag_reorder_blocks_and_partition);
2670 	section_switch_note = iter;
2671 	if (first_no_action_insn)
2672 	  {
2673 	    first_no_action_insn_before_switch = first_no_action_insn;
2674 	    last_no_action_insn_before_switch = last_action_insn;
2675 	    first_no_action_insn = NULL;
2676 	    gcc_assert (last_action == -1);
2677 	    last_action = -3;
2678 	  }
2679 	/* Force closing of current EH region before section switch and
2680 	   opening a new one afterwards.  */
2681 	else if (last_action != -3)
2682 	  last_landing_pad = pc_rtx;
2683 	if (crtl->eh.call_site_record_v[cur_sec])
2684 	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2685 	cur_sec++;
2686 	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2687 	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2688       }
2689 
2690   if (last_action >= -1 && ! first_no_action_insn)
2691     {
2692       note = emit_note_eh_region_end (last_action_insn);
2693       NOTE_EH_HANDLER (note) = call_site;
2694     }
2695 
2696   call_site_base = saved_call_site_base;
2697 
2698   return 0;
2699 }
2700 
2701 namespace {
2702 
2703 const pass_data pass_data_convert_to_eh_region_ranges =
2704 {
2705   RTL_PASS, /* type */
2706   "eh_ranges", /* name */
2707   OPTGROUP_NONE, /* optinfo_flags */
2708   TV_NONE, /* tv_id */
2709   0, /* properties_required */
2710   0, /* properties_provided */
2711   0, /* properties_destroyed */
2712   0, /* todo_flags_start */
2713   0, /* todo_flags_finish */
2714 };
2715 
2716 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2717 {
2718 public:
2719   pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2720     : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2721   {}
2722 
2723   /* opt_pass methods: */
2724   virtual bool gate (function *);
2725   virtual unsigned int execute (function *)
2726     {
2727       int ret = convert_to_eh_region_ranges ();
2728       maybe_add_nop_after_section_switch ();
2729       return ret;
2730     }
2731 
2732 }; // class pass_convert_to_eh_region_ranges
2733 
2734 bool
2735 pass_convert_to_eh_region_ranges::gate (function *)
2736 {
2737   /* Nothing to do for SJLJ exceptions or if no regions created.  */
2738   if (cfun->eh->region_tree == NULL)
2739     return false;
2740   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2741     return false;
2742   return true;
2743 }
2744 
2745 } // anon namespace
2746 
2747 rtl_opt_pass *
2748 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2749 {
2750   return new pass_convert_to_eh_region_ranges (ctxt);
2751 }
2752 
2753 static void
2754 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2755 {
2756   do
2757     {
2758       unsigned char byte = value & 0x7f;
2759       value >>= 7;
2760       if (value)
2761 	byte |= 0x80;
2762       vec_safe_push (*data_area, byte);
2763     }
2764   while (value);
2765 }
2766 
2767 static void
2768 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2769 {
2770   unsigned char byte;
2771   int more;
2772 
2773   do
2774     {
2775       byte = value & 0x7f;
2776       value >>= 7;
2777       more = ! ((value == 0 && (byte & 0x40) == 0)
2778 		|| (value == -1 && (byte & 0x40) != 0));
2779       if (more)
2780 	byte |= 0x80;
2781       vec_safe_push (*data_area, byte);
2782     }
2783   while (more);
2784 }
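
/* Worked encodings (the standard DWARF LEB128 examples): push_uleb128
   of 624485 appends the bytes 0xe5 0x8e 0x26, and push_sleb128 of
   -129 appends 0xff 0x7e; in both, the high bit of every byte except
   the last marks a continuation.  */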
2785 
2786 
2787 static int
2788 dw2_size_of_call_site_table (int section)
2789 {
2790   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2791   int size = n * (4 + 4 + 4);
2792   int i;
2793 
2794   for (i = 0; i < n; ++i)
2795     {
2796       struct call_site_record_d *cs =
2797 	(*crtl->eh.call_site_record_v[section])[i];
2798       size += size_of_uleb128 (cs->action);
2799     }
2800 
2801   return size;
2802 }
2803 
2804 static int
2805 sjlj_size_of_call_site_table (void)
2806 {
2807   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2808   int size = 0;
2809   int i;
2810 
2811   for (i = 0; i < n; ++i)
2812     {
2813       struct call_site_record_d *cs =
2814 	(*crtl->eh.call_site_record_v[0])[i];
2815       size += size_of_uleb128 (INTVAL (cs->landing_pad));
2816       size += size_of_uleb128 (cs->action);
2817     }
2818 
2819   return size;
2820 }
2821 
2822 static void
2823 dw2_output_call_site_table (int cs_format, int section)
2824 {
2825   int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2826   int i;
2827   const char *begin;
2828 
2829   if (section == 0)
2830     begin = current_function_func_begin_label;
2831   else if (first_function_block_is_cold)
2832     begin = crtl->subsections.hot_section_label;
2833   else
2834     begin = crtl->subsections.cold_section_label;
2835 
2836   for (i = 0; i < n; ++i)
2837     {
2838       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2839       char reg_start_lab[32];
2840       char reg_end_lab[32];
2841       char landing_pad_lab[32];
2842 
2843       ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2844       ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2845 
2846       if (cs->landing_pad)
2847 	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2848 				     CODE_LABEL_NUMBER (cs->landing_pad));
2849 
2850       /* ??? Perhaps use insn length scaling if the assembler supports
2851 	 generic arithmetic.  */
2852       /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2853 	 data4 if the function is small enough.  */
2854       if (cs_format == DW_EH_PE_uleb128)
2855 	{
2856 	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2857 					"region %d start", i);
2858 	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2859 					"length");
2860 	  if (cs->landing_pad)
2861 	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2862 					  "landing pad");
2863 	  else
2864 	    dw2_asm_output_data_uleb128 (0, "landing pad");
2865 	}
2866       else
2867 	{
2868 	  dw2_asm_output_delta (4, reg_start_lab, begin,
2869 				"region %d start", i);
2870 	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2871 	  if (cs->landing_pad)
2872 	    dw2_asm_output_delta (4, landing_pad_lab, begin,
2873 				  "landing pad");
2874 	  else
2875 	    dw2_asm_output_data (4, 0, "landing pad");
2876 	}
2877       dw2_asm_output_data_uleb128 (cs->action, "action");
2878     }
2879 
2880   call_site_base += n;
2881 }
2882 
2883 static void
2884 sjlj_output_call_site_table (void)
2885 {
2886   int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2887   int i;
2888 
2889   for (i = 0; i < n; ++i)
2890     {
2891       struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2892 
2893       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2894 				   "region %d landing pad", i);
2895       dw2_asm_output_data_uleb128 (cs->action, "action");
2896     }
2897 
2898   call_site_base += n;
2899 }
2900 
2901 /* Switch to the section that should be used for exception tables.  */
2902 
2903 static void
2904 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2905 {
2906   section *s;
2907 
2908   if (exception_section)
2909     s = exception_section;
2910   else
2911     {
2912       int flags;
2913 
2914       if (EH_TABLES_CAN_BE_READ_ONLY)
2915 	{
2916 	  int tt_format =
2917 	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2918 	  flags = ((! flag_pic
2919 		    || ((tt_format & 0x70) != DW_EH_PE_absptr
2920 			&& (tt_format & 0x70) != DW_EH_PE_aligned))
2921 		   ? 0 : SECTION_WRITE);
2922 	}
2923       else
2924 	flags = SECTION_WRITE;
2925 
2926       /* Compute the section and cache it into exception_section,
2927 	 unless it depends on the function name.  */
2928       if (targetm_common.have_named_sections)
2929 	{
2930 #ifdef HAVE_LD_EH_GC_SECTIONS
2931 	  if (flag_function_sections
2932 	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2933 	    {
2934 	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2935 	      /* The EH table must match the code section, so only mark
2936 		 it linkonce if we have COMDAT groups to tie them together.  */
2937 	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2938 		flags |= SECTION_LINKONCE;
2939 	      sprintf (section_name, ".gcc_except_table.%s", fnname);
2940 	      s = get_section (section_name, flags, current_function_decl);
2941 	      free (section_name);
2942 	    }
2943 	  else
2944 #endif
2945 	    exception_section
2946 	      = s = get_section (".gcc_except_table", flags, NULL);
2947 	}
2948       else
2949 	exception_section
2950 	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
2951     }
2952 
2953   switch_to_section (s);
2954 }
2955 
2956 /* Output a reference from an exception table to the type_info object TYPE.
2957    TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2958    the value.  */
2959 
2960 static void
2961 output_ttype (tree type, int tt_format, int tt_format_size)
2962 {
2963   rtx value;
2964   bool is_public = true;
2965 
2966   if (type == NULL_TREE)
2967     value = const0_rtx;
2968   else
2969     {
2970       /* FIXME lto.  pass_ipa_free_lang_data changes all types to
2971 	 runtime types so TYPE should already be a runtime type
2972 	 reference.  When pass_ipa_free_lang_data is made a default
2973 	 pass, we can then remove the call to lookup_type_for_runtime
2974 	 below.  */
2975       if (TYPE_P (type))
2976 	type = lookup_type_for_runtime (type);
2977 
2978       value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2979 
2980       /* Let cgraph know that the rtti decl is used.  Not all of the
2981 	 paths below go through assemble_integer, which would take
2982 	 care of this for us.  */
2983       STRIP_NOPS (type);
2984       if (TREE_CODE (type) == ADDR_EXPR)
2985 	{
2986 	  type = TREE_OPERAND (type, 0);
2987 	  if (VAR_P (type))
2988 	    is_public = TREE_PUBLIC (type);
2989 	}
2990       else
2991 	gcc_assert (TREE_CODE (type) == INTEGER_CST);
2992     }
2993 
2994   /* Allow the target to override the type table entry format.  */
2995   if (targetm.asm_out.ttype (value))
2996     return;
2997 
2998   if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2999     assemble_integer (value, tt_format_size,
3000 		      tt_format_size * BITS_PER_UNIT, 1);
3001   else
3002     dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3003 }
3004 
3005 /* Output an exception table for the current function according to SECTION.
3006 
3007    If the function has been partitioned into hot and cold parts, value 0 for
3008    SECTION refers to the table associated with the hot part while value 1
3009    refers to the table associated with the cold part.  If the function has
3010    not been partitioned, value 0 refers to the single exception table.  */
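
/* The output below follows the usual LSDA shape; as a summary (not a
   normative layout description):

     @LPStart format		1 byte, DW_EH_PE_omit here
     @TType format		1 byte
     [@TType base offset]	uleb128, only if there is type data
     call-site format		1 byte
     call-site table length	uleb128
     call-site table
     action record table
     @TType (type) table	indexed backwards from its end  */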
3011 
3012 static void
3013 output_one_function_exception_table (int section)
3014 {
3015   int tt_format, cs_format, lp_format, i;
3016   char ttype_label[32];
3017   char cs_after_size_label[32];
3018   char cs_end_label[32];
3019   int call_site_len;
3020   int have_tt_data;
3021   int tt_format_size = 0;
3022 
3023   have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
3024 		  || (targetm.arm_eabi_unwinder
3025 		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
3026 		      : vec_safe_length (cfun->eh->ehspec_data.other)));
3027 
3028   /* Indicate the format of the @TType entries.  */
3029   if (! have_tt_data)
3030     tt_format = DW_EH_PE_omit;
3031   else
3032     {
3033       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3034       if (HAVE_AS_LEB128)
3035 	ASM_GENERATE_INTERNAL_LABEL (ttype_label,
3036 				     section ? "LLSDATTC" : "LLSDATT",
3037 				     current_function_funcdef_no);
3038 
3039       tt_format_size = size_of_encoded_value (tt_format);
3040 
3041       assemble_align (tt_format_size * BITS_PER_UNIT);
3042     }
3043 
3044   targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
3045 				  current_function_funcdef_no);
3046 
3047   /* The LSDA header.  */
3048 
3049   /* Indicate the format of the landing pad start pointer.  An omitted
3050      field implies @LPStart == @Start.  */
3051   /* Currently we always put @LPStart == @Start.  This field would
3052      be most useful in moving the landing pads completely out of
3053      line to another section, but it could also be used to minimize
3054      the size of uleb128 landing pad offsets.  */
3055   lp_format = DW_EH_PE_omit;
3056   dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3057 		       eh_data_format_name (lp_format));
3058 
3059   /* @LPStart pointer would go here.  */
3060 
3061   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3062 		       eh_data_format_name (tt_format));
3063 
3064   if (!HAVE_AS_LEB128)
3065     {
3066       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3067 	call_site_len = sjlj_size_of_call_site_table ();
3068       else
3069 	call_site_len = dw2_size_of_call_site_table (section);
3070     }
3071 
3072   /* A pc-relative 4-byte displacement to the @TType data.  */
3073   if (have_tt_data)
3074     {
3075       if (HAVE_AS_LEB128)
3076 	{
3077 	  char ttype_after_disp_label[32];
3078 	  ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3079 				       section ? "LLSDATTDC" : "LLSDATTD",
3080 				       current_function_funcdef_no);
3081 	  dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3082 					"@TType base offset");
3083 	  ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3084 	}
3085       else
3086 	{
3087 	  /* Ug.  Alignment queers things.  */
3088 	  unsigned int before_disp, after_disp, last_disp, disp;
3089 
3090 	  before_disp = 1 + 1;
3091 	  after_disp = (1 + size_of_uleb128 (call_site_len)
3092 			+ call_site_len
3093 			+ vec_safe_length (crtl->eh.action_record_data)
3094 			+ (vec_safe_length (cfun->eh->ttype_data)
3095 			   * tt_format_size));
3096 
3097 	  disp = after_disp;
3098 	  do
3099 	    {
3100 	      unsigned int disp_size, pad;
3101 
3102 	      last_disp = disp;
3103 	      disp_size = size_of_uleb128 (disp);
3104 	      pad = before_disp + disp_size + after_disp;
3105 	      if (pad % tt_format_size)
3106 		pad = tt_format_size - (pad % tt_format_size);
3107 	      else
3108 		pad = 0;
3109 	      disp = after_disp + pad;
3110 	    }
3111 	  while (disp != last_disp);
3112 
3113 	  dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3114 	}
3115     }
3116 
3117   /* Indicate the format of the call-site offsets.  */
3118   if (HAVE_AS_LEB128)
3119     cs_format = DW_EH_PE_uleb128;
3120   else
3121     cs_format = DW_EH_PE_udata4;
3122 
3123   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3124 		       eh_data_format_name (cs_format));
3125 
3126   if (HAVE_AS_LEB128)
3127     {
3128       ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3129 				   section ? "LLSDACSBC" : "LLSDACSB",
3130 				   current_function_funcdef_no);
3131       ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3132 				   section ? "LLSDACSEC" : "LLSDACSE",
3133 				   current_function_funcdef_no);
3134       dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3135 				    "Call-site table length");
3136       ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3137       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3138 	sjlj_output_call_site_table ();
3139       else
3140 	dw2_output_call_site_table (cs_format, section);
3141       ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3142     }
3143   else
3144     {
3145       dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3146       if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3147 	sjlj_output_call_site_table ();
3148       else
3149 	dw2_output_call_site_table (cs_format, section);
3150     }
3151 
3152   /* ??? Decode and interpret the data for flag_debug_asm.  */
3153   {
3154     uchar uc;
3155     FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3156       dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3157   }
3158 
3159   if (have_tt_data)
3160     assemble_align (tt_format_size * BITS_PER_UNIT);
3161 
3162   i = vec_safe_length (cfun->eh->ttype_data);
3163   while (i-- > 0)
3164     {
3165       tree type = (*cfun->eh->ttype_data)[i];
3166       output_ttype (type, tt_format, tt_format_size);
3167     }
3168 
3169   if (HAVE_AS_LEB128 && have_tt_data)
3170     ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3171 
3172   /* ??? Decode and interpret the data for flag_debug_asm.  */
3173   if (targetm.arm_eabi_unwinder)
3174     {
3175       tree type;
3176       for (i = 0;
3177 	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3178 	output_ttype (type, tt_format, tt_format_size);
3179     }
3180   else
3181     {
3182       uchar uc;
3183       for (i = 0;
3184 	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3185 	dw2_asm_output_data (1, uc,
3186 			     i ? NULL : "Exception specification table");
3187     }
3188 }
3189 
3190 /* Output an exception table for the current function according to SECTION,
3191    switching back and forth from the function section appropriately.
3192 
3193    If the function has been partitioned into hot and cold parts, value 0 for
3194    SECTION refers to the table associated with the hot part while value 1
3195    refers to the table associated with the cold part.  If the function has
3196    not been partitioned, value 0 refers to the single exception table.  */
3197 
3198 void
3199 output_function_exception_table (int section)
3200 {
3201   const char *fnname = get_fnname_from_decl (current_function_decl);
3202   rtx personality = get_personality_function (current_function_decl);
3203 
3204   /* Not all functions need anything.  */
3205   if (!crtl->uses_eh_lsda
3206       || targetm_common.except_unwind_info (&global_options) == UI_NONE)
3207     return;
3208 
3209   /* No need to emit any boilerplate stuff for the cold part.  */
3210   if (section == 1 && !crtl->eh.call_site_record_v[1])
3211     return;
3212 
3213   if (personality)
3214     {
3215       assemble_external_libcall (personality);
3216 
3217       if (targetm.asm_out.emit_except_personality)
3218 	targetm.asm_out.emit_except_personality (personality);
3219     }
3220 
3221   switch_to_exception_section (fnname);
3222 
3223   /* If the target wants a label to begin the table, emit it here.  */
3224   targetm.asm_out.emit_except_table_label (asm_out_file);
3225 
3226   /* Do the real work.  */
3227   output_one_function_exception_table (section);
3228 
3229   switch_to_section (current_function_section ());
3230 }
3231 
3232 void
3233 set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
3234 {
3235   fun->eh->throw_stmt_table = table;
3236 }
3237 
3238 hash_map<gimple *, int> *
3239 get_eh_throw_stmt_table (struct function *fun)
3240 {
3241   return fun->eh->throw_stmt_table;
3242 }
3243 
3244 /* Determine if the function needs an EH personality function.  */
3245 
3246 enum eh_personality_kind
3247 function_needs_eh_personality (struct function *fn)
3248 {
3249   enum eh_personality_kind kind = eh_personality_none;
3250   eh_region i;
3251 
3252   FOR_ALL_EH_REGION_FN (i, fn)
3253     {
3254       switch (i->type)
3255 	{
3256 	case ERT_CLEANUP:
3257 	  /* Can do with any personality including the generic C one.  */
3258 	  kind = eh_personality_any;
3259 	  break;
3260 
3261 	case ERT_TRY:
3262 	case ERT_ALLOWED_EXCEPTIONS:
3263 	  /* Always needs an EH personality function.  The generic C
3264 	     personality doesn't handle these even for empty type lists.  */
3265 	  return eh_personality_lang;
3266 
3267 	case ERT_MUST_NOT_THROW:
3268 	  /* Always needs an EH personality function.  The language may
3269 	     specify what abort routine must be used, e.g. std::terminate.  */
3270 	  return eh_personality_lang;
3271 	}
3272     }
3273 
3274   return kind;
3275 }
3276 
3277 /* Dump EH information to OUT.  */
3278 
3279 void
3280 dump_eh_tree (FILE * out, struct function *fun)
3281 {
3282   eh_region i;
3283   int depth = 0;
3284   static const char *const type_name[] = {
3285     "cleanup", "try", "allowed_exceptions", "must_not_throw"
3286   };
3287 
3288   i = fun->eh->region_tree;
3289   if (!i)
3290     return;
3291 
3292   fprintf (out, "Eh tree:\n");
3293   while (1)
3294     {
3295       fprintf (out, "  %*s %i %s", depth * 2, "",
3296 	       i->index, type_name[(int) i->type]);
3297 
3298       if (i->landing_pads)
3299 	{
3300 	  eh_landing_pad lp;
3301 
3302 	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx_insn *lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter:%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

/* Dump the EH tree for FN on stderr.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}

/* Verify invariants on EH data structures.  */

DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;
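  /* Verify that every live slot in the region array records its own
     index; count_r tallies the regions the tree walk must visit.  */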
  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("%<region_array%> is corrupted for region %i", r->index);
	    err = true;
	  }
      }

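  /* Likewise for the landing pad array.  */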
  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("%<lp_array%> is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

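  /* Now walk the region tree itself, in the same inner/next_peer/outer
     order as dump_eh_tree, checking that array entries, outer links and
     landing-pad back-pointers agree, and counting what we visit.  */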
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
	{
	  error ("%<region_array%> is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if ((*fun->eh->lp_array)[lp->index] != lp)
	    {
	      error ("%<lp_array%> is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
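  /* The walk must return to depth 0 and must have visited exactly the
     entries counted in the array passes above.  */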
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("%<region_array%> does not match %<region_tree%>");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("%<lp_array%> does not match %<region_tree%>");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("%qs failed", __func__);
    }
}

#include "gt-except.h"
3522