1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurred without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
47 [ Add updated documentation on how to use this. ] */
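
/* As a purely illustrative C++ sketch (not part of the original sources),
   the object-construction case mentioned above looks like:

       Foo *make_foo () { return new Foo; }   // Foo's constructor may throw
       void caller ()
       {
         try { make_foo (); }
         catch (...) { handle_error (); }     // handle_error is hypothetical
       }

   Control passes from the point of the throw, back out of make_foo's
   frame, to the handler in caller; the exception is "thrown" up the
   call stack as described above.  */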
48
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "flags.h"
57 #include "function.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "insn-config.h"
61 #include "except.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
65 #include "output.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
68 #include "dwarf2.h"
69 #include "toplev.h"
70 #include "hashtab.h"
71 #include "intl.h"
72 #include "ggc.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "langhooks.h"
76 #include "cgraph.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
79 #include "timevar.h"
80
81 /* Provide defaults for stuff that may not be defined when using
82 sjlj exceptions. */
83 #ifndef EH_RETURN_DATA_REGNO
84 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
85 #endif
86
87
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 tree (*lang_protect_cleanup_actions) (void);
91
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) (tree a, tree b);
94
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) (tree);
97
98 /* A hash table of label to region number. */
99
100 struct ehl_map_entry GTY(())
101 {
102 rtx label;
103 struct eh_region *region;
104 };
105
106 static GTY(()) int call_site_base;
107 static GTY ((param_is (union tree_node)))
108 htab_t type_to_runtime_map;
109
110 /* Describe the SjLj_Function_Context structure. */
111 static GTY(()) tree sjlj_fc_type_node;
112 static int sjlj_fc_call_site_ofs;
113 static int sjlj_fc_data_ofs;
114 static int sjlj_fc_personality_ofs;
115 static int sjlj_fc_lsda_ofs;
116 static int sjlj_fc_jbuf_ofs;
117
118 /* Describes one exception region. */
119 struct eh_region GTY(())
120 {
121 /* The immediately surrounding region. */
122 struct eh_region *outer;
123
124 /* The list of immediately contained regions. */
125 struct eh_region *inner;
126 struct eh_region *next_peer;
127
128 /* An identifier for this region. */
129 int region_number;
130
131 /* When a region is deleted, its parents inherit the REG_EH_REGION
132 numbers already assigned. */
133 bitmap aka;
134
135 /* Each region does exactly one thing. */
136 enum eh_region_type
137 {
138 ERT_UNKNOWN = 0,
139 ERT_CLEANUP,
140 ERT_TRY,
141 ERT_CATCH,
142 ERT_ALLOWED_EXCEPTIONS,
143 ERT_MUST_NOT_THROW,
144 ERT_THROW
145 } type;
146
147 /* Holds the action to perform based on the preceding type. */
148 union eh_region_u {
149 /* A list of catch blocks, a surrounding try block,
150 and the label for continuing after a catch. */
151 struct eh_region_u_try {
152 struct eh_region *catch;
153 struct eh_region *last_catch;
154 } GTY ((tag ("ERT_TRY"))) try;
155
156 /* The list through the catch handlers, the list of type objects
157 matched, and the list of associated filters. */
158 struct eh_region_u_catch {
159 struct eh_region *next_catch;
160 struct eh_region *prev_catch;
161 tree type_list;
162 tree filter_list;
163 } GTY ((tag ("ERT_CATCH"))) catch;
164
165 /* A tree_list of allowed types. */
166 struct eh_region_u_allowed {
167 tree type_list;
168 int filter;
169 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
170
171 /* The type given by a call to "throw foo();", or discovered
172 for a throw. */
173 struct eh_region_u_throw {
174 tree type;
175 } GTY ((tag ("ERT_THROW"))) throw;
176
177 /* Retain the cleanup expression even after expansion so that
178 we can match up fixup regions. */
179 struct eh_region_u_cleanup {
180 struct eh_region *prev_try;
181 } GTY ((tag ("ERT_CLEANUP"))) cleanup;
182 } GTY ((desc ("%0.type"))) u;
183
184 /* Entry point for this region's handler before landing pads are built. */
185 rtx label;
186 tree tree_label;
187
188 /* Entry point for this region's handler from the runtime eh library. */
189 rtx landing_pad;
190
191 /* Entry point for this region's handler from an inner region. */
192 rtx post_landing_pad;
193
194 /* The RESX insn for handing off control to the next outermost handler,
195 if appropriate. */
196 rtx resume;
197
198 /* True if something in this region may throw. */
199 unsigned may_contain_throw : 1;
200 };
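
/* As a rough, illustrative sketch (region numbers made up), a body such as
   "try { ... } catch (T1) { ... } catch (T2) { ... }" is represented by

       <outer region or region_tree root>
         ERT_TRY     #1   u.try.catch -> #2, u.try.last_catch -> #3
         ERT_CATCH   #2   u.catch.next_catch -> #3
         ERT_CATCH   #3   u.catch.prev_catch -> #2

   i.e. the catch regions are created as peers of the try under its outer
   region (see gen_eh_region_catch below), and the try reaches its handlers
   through the u.try.catch / u.try.last_catch links.  */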
201
202 typedef struct eh_region *eh_region;
203
204 struct call_site_record GTY(())
205 {
206 rtx landing_pad;
207 int action;
208 };
209
210 DEF_VEC_P(eh_region);
211 DEF_VEC_ALLOC_P(eh_region, gc);
212
213 /* Used to save exception status for each function. */
214 struct eh_status GTY(())
215 {
216 /* The tree of all regions for this function. */
217 struct eh_region *region_tree;
218
219 /* The same information as an indexable array. */
220 VEC(eh_region,gc) *region_array;
221
222 /* The most recently open region. */
223 struct eh_region *cur_region;
224
225 /* This is the region for which we are processing catch blocks. */
226 struct eh_region *try_region;
227
228 rtx filter;
229 rtx exc_ptr;
230
231 int built_landing_pads;
232 int last_region_number;
233
234 VEC(tree,gc) *ttype_data;
235 varray_type ehspec_data;
236 varray_type action_record_data;
237
238 htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
239
240 struct call_site_record * GTY ((length ("%h.call_site_data_used")))
241 call_site_data;
242 int call_site_data_used;
243 int call_site_data_size;
244
245 rtx ehr_stackadj;
246 rtx ehr_handler;
247 rtx ehr_label;
248
249 rtx sjlj_fc;
250 rtx sjlj_exit_after;
251
252 htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
253 };
254
255 static int t2r_eq (const void *, const void *);
256 static hashval_t t2r_hash (const void *);
257 static void add_type_for_runtime (tree);
258 static tree lookup_type_for_runtime (tree);
259
260 static void remove_unreachable_regions (rtx);
261
262 static int ttypes_filter_eq (const void *, const void *);
263 static hashval_t ttypes_filter_hash (const void *);
264 static int ehspec_filter_eq (const void *, const void *);
265 static hashval_t ehspec_filter_hash (const void *);
266 static int add_ttypes_entry (htab_t, tree);
267 static int add_ehspec_entry (htab_t, htab_t, tree);
268 static void assign_filter_values (void);
269 static void build_post_landing_pads (void);
270 static void connect_post_landing_pads (void);
271 static void dw2_build_landing_pads (void);
272
273 struct sjlj_lp_info;
274 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
275 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
276 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
277 static void sjlj_emit_function_enter (rtx);
278 static void sjlj_emit_function_exit (void);
279 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
280 static void sjlj_build_landing_pads (void);
281
282 static hashval_t ehl_hash (const void *);
283 static int ehl_eq (const void *, const void *);
284 static void add_ehl_entry (rtx, struct eh_region *);
285 static void remove_exception_handler_label (rtx);
286 static void remove_eh_handler (struct eh_region *);
287 static int for_each_eh_label_1 (void **, void *);
288
289 /* The return value of reachable_next_level. */
290 enum reachable_code
291 {
292 /* The given exception is not processed by the given region. */
293 RNL_NOT_CAUGHT,
294 /* The given exception may need processing by the given region. */
295 RNL_MAYBE_CAUGHT,
296 /* The given exception is completely processed by the given region. */
297 RNL_CAUGHT,
298 /* The given exception is completely processed by the runtime. */
299 RNL_BLOCKED
300 };
301
302 struct reachable_info;
303 static enum reachable_code reachable_next_level (struct eh_region *, tree,
304 struct reachable_info *);
305
306 static int action_record_eq (const void *, const void *);
307 static hashval_t action_record_hash (const void *);
308 static int add_action_record (htab_t, int, int);
309 static int collect_one_action_chain (htab_t, struct eh_region *);
310 static int add_call_site (rtx, int);
311
312 static void push_uleb128 (varray_type *, unsigned int);
313 static void push_sleb128 (varray_type *, int);
314 #ifndef HAVE_AS_LEB128
315 static int dw2_size_of_call_site_table (void);
316 static int sjlj_size_of_call_site_table (void);
317 #endif
318 static void dw2_output_call_site_table (void);
319 static void sjlj_output_call_site_table (void);
320
321
322 /* Routine to see if exception handling is turned on.
323 DO_WARN is nonzero if we want to inform the user that exception
324 handling is turned off.
325
326 This is used to ensure that -fexceptions has been specified if the
327 compiler tries to use any exception-specific functions. */
328
329 int
330 doing_eh (int do_warn)
331 {
332 if (! flag_exceptions)
333 {
334 static int warned = 0;
335 if (! warned && do_warn)
336 {
337 error ("exception handling disabled, use -fexceptions to enable");
338 warned = 1;
339 }
340 return 0;
341 }
342 return 1;
343 }
344
345
346 void
347 init_eh (void)
348 {
349 if (! flag_exceptions)
350 return;
351
352 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
353
354 /* Create the SjLj_Function_Context structure. This should match
355 the definition in unwind-sjlj.c. */
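/* For orientation only: the layout built below corresponds roughly to a C
   structure of the following shape, inferred from the fields created here
   (the authoritative definition is the one in unwind-sjlj.c):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *__prev;
         int __call_site;
         unsigned long __data[4];       // really word_mode-sized unsigned ints
         void *__personality;
         void *__lsda;
         void *__jbuf[N];               // N is target-dependent, see below
       };
*/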
356 if (USING_SJLJ_EXCEPTIONS)
357 {
358 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
359
360 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
361
362 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
363 build_pointer_type (sjlj_fc_type_node));
364 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
365
366 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
367 integer_type_node);
368 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
369
370 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
371 tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
372 tmp);
373 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
374 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
375
376 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
377 ptr_type_node);
378 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
379
380 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
381 ptr_type_node);
382 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
383
384 #ifdef DONT_USE_BUILTIN_SETJMP
385 #ifdef JMP_BUF_SIZE
386 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
387 #else
388 /* This should be large enough for most systems; if it is not,
389 JMP_BUF_SIZE should be defined with the proper value. It will
390 also tend to be larger than necessary for most systems; a more
391 optimal port will define JMP_BUF_SIZE. */
392 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
393 #endif
394 #else
395 /* builtin_setjmp takes a pointer to 5 words. */
396 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
397 #endif
398 tmp = build_index_type (tmp);
399 tmp = build_array_type (ptr_type_node, tmp);
400 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
401 #ifdef DONT_USE_BUILTIN_SETJMP
402 /* We don't know what the alignment requirements of the
403 runtime's jmp_buf are. Overestimate. */
404 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
405 DECL_USER_ALIGN (f_jbuf) = 1;
406 #endif
407 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
408
409 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
410 TREE_CHAIN (f_prev) = f_cs;
411 TREE_CHAIN (f_cs) = f_data;
412 TREE_CHAIN (f_data) = f_per;
413 TREE_CHAIN (f_per) = f_lsda;
414 TREE_CHAIN (f_lsda) = f_jbuf;
415
416 layout_type (sjlj_fc_type_node);
417
418 /* Cache the interesting field offsets so that we have
419 easy access from rtl. */
420 sjlj_fc_call_site_ofs
421 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
422 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
423 sjlj_fc_data_ofs
424 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
425 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
426 sjlj_fc_personality_ofs
427 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
428 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
429 sjlj_fc_lsda_ofs
430 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
431 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
432 sjlj_fc_jbuf_ofs
433 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
434 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
435 }
436 }
437
438 void
439 init_eh_for_function (void)
440 {
441 cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
442 }
443
444 /* Routines to generate the exception tree somewhat directly.
445 These are used from tree-eh.c when processing exception related
446 nodes during tree optimization. */
447
448 static struct eh_region *
449 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
450 {
451 struct eh_region *new;
452
453 #ifdef ENABLE_CHECKING
454 gcc_assert (doing_eh (0));
455 #endif
456
457 /* Insert a new blank region as a leaf in the tree. */
458 new = ggc_alloc_cleared (sizeof (*new));
459 new->type = type;
460 new->outer = outer;
461 if (outer)
462 {
463 new->next_peer = outer->inner;
464 outer->inner = new;
465 }
466 else
467 {
468 new->next_peer = cfun->eh->region_tree;
469 cfun->eh->region_tree = new;
470 }
471
472 new->region_number = ++cfun->eh->last_region_number;
473
474 return new;
475 }
476
477 struct eh_region *
478 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
479 {
480 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
481 cleanup->u.cleanup.prev_try = prev_try;
482 return cleanup;
483 }
484
485 struct eh_region *
486 gen_eh_region_try (struct eh_region *outer)
487 {
488 return gen_eh_region (ERT_TRY, outer);
489 }
490
491 struct eh_region *
492 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
493 {
494 struct eh_region *c, *l;
495 tree type_list, type_node;
496
497 /* Ensure we always end up with a type list, to normalize further
498 processing; then register each type against the runtime types map. */
499 type_list = type_or_list;
500 if (type_or_list)
501 {
502 if (TREE_CODE (type_or_list) != TREE_LIST)
503 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
504
505 type_node = type_list;
506 for (; type_node; type_node = TREE_CHAIN (type_node))
507 add_type_for_runtime (TREE_VALUE (type_node));
508 }
509
510 c = gen_eh_region (ERT_CATCH, t->outer);
511 c->u.catch.type_list = type_list;
512 l = t->u.try.last_catch;
513 c->u.catch.prev_catch = l;
514 if (l)
515 l->u.catch.next_catch = c;
516 else
517 t->u.try.catch = c;
518 t->u.try.last_catch = c;
519
520 return c;
521 }
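
/* A hypothetical usage sketch (not taken from any actual front end): a try
   statement with two handlers might be registered as

       struct eh_region *r = gen_eh_region_try (outer);
       gen_eh_region_catch (r, t1_type);
       gen_eh_region_catch (r, t2_type);

   where "outer", "t1_type" and "t2_type" stand for whatever region and type
   nodes the caller already has; tree-eh.c is the real consumer of these
   routines.  */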
522
523 struct eh_region *
524 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
525 {
526 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
527 region->u.allowed.type_list = allowed;
528
529 for (; allowed ; allowed = TREE_CHAIN (allowed))
530 add_type_for_runtime (TREE_VALUE (allowed));
531
532 return region;
533 }
534
535 struct eh_region *
536 gen_eh_region_must_not_throw (struct eh_region *outer)
537 {
538 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
539 }
540
541 int
542 get_eh_region_number (struct eh_region *region)
543 {
544 return region->region_number;
545 }
546
547 bool
548 get_eh_region_may_contain_throw (struct eh_region *region)
549 {
550 return region->may_contain_throw;
551 }
552
553 tree
554 get_eh_region_tree_label (struct eh_region *region)
555 {
556 return region->tree_label;
557 }
558
559 void
560 set_eh_region_tree_label (struct eh_region *region, tree lab)
561 {
562 region->tree_label = lab;
563 }
564
565 void
566 expand_resx_expr (tree exp)
567 {
568 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
569 struct eh_region *reg = VEC_index (eh_region,
570 cfun->eh->region_array, region_nr);
571
572 gcc_assert (!reg->resume);
573 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
574 emit_barrier ();
575 }
576
577 /* Note that the current EH region (if any) may contain a throw, or a
578 call to a function which itself may contain a throw. */
579
580 void
581 note_eh_region_may_contain_throw (struct eh_region *region)
582 {
583 while (region && !region->may_contain_throw)
584 {
585 region->may_contain_throw = 1;
586 region = region->outer;
587 }
588 }
589
590 void
591 note_current_region_may_contain_throw (void)
592 {
593 note_eh_region_may_contain_throw (cfun->eh->cur_region);
594 }
595
596
597 /* Return an rtl expression for a pointer to the exception object
598 within a handler. */
599
600 rtx
601 get_exception_pointer (struct function *fun)
602 {
603 rtx exc_ptr = fun->eh->exc_ptr;
604 if (fun == cfun && ! exc_ptr)
605 {
606 exc_ptr = gen_reg_rtx (ptr_mode);
607 fun->eh->exc_ptr = exc_ptr;
608 }
609 return exc_ptr;
610 }
611
612 /* Return an rtl expression for the exception dispatch filter
613 within a handler. */
614
615 rtx
616 get_exception_filter (struct function *fun)
617 {
618 rtx filter = fun->eh->filter;
619 if (fun == cfun && ! filter)
620 {
621 filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
622 fun->eh->filter = filter;
623 }
624 return filter;
625 }
626
627 /* This section is for the exception handling specific optimization pass. */
628
629 /* Random access the exception region tree. */
630
631 void
632 collect_eh_region_array (void)
633 {
634 struct eh_region *i;
635
636 i = cfun->eh->region_tree;
637 if (! i)
638 return;
639
640 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
641 cfun->eh->last_region_number + 1);
642 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
643
644 while (1)
645 {
646 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
647
648 /* If there are sub-regions, process them. */
649 if (i->inner)
650 i = i->inner;
651 /* If there are peers, process them. */
652 else if (i->next_peer)
653 i = i->next_peer;
654 /* Otherwise, step back up the tree to the next peer. */
655 else
656 {
657 do {
658 i = i->outer;
659 if (i == NULL)
660 return;
661 } while (i->next_peer == NULL);
662 i = i->next_peer;
663 }
664 }
665 }
666
667 /* Remove all regions whose labels are not reachable from insns. */
668
669 static void
670 remove_unreachable_regions (rtx insns)
671 {
672 int i, *uid_region_num;
673 bool *reachable;
674 struct eh_region *r;
675 rtx insn;
676
677 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
678 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
679
680 for (i = cfun->eh->last_region_number; i > 0; --i)
681 {
682 r = VEC_index (eh_region, cfun->eh->region_array, i);
683 if (!r || r->region_number != i)
684 continue;
685
686 if (r->resume)
687 {
688 gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
689 uid_region_num[INSN_UID (r->resume)] = i;
690 }
691 if (r->label)
692 {
693 gcc_assert (!uid_region_num[INSN_UID (r->label)]);
694 uid_region_num[INSN_UID (r->label)] = i;
695 }
696 }
697
698 for (insn = insns; insn; insn = NEXT_INSN (insn))
699 reachable[uid_region_num[INSN_UID (insn)]] = true;
700
701 for (i = cfun->eh->last_region_number; i > 0; --i)
702 {
703 r = VEC_index (eh_region, cfun->eh->region_array, i);
704 if (r && r->region_number == i && !reachable[i])
705 {
706 bool kill_it = true;
707 switch (r->type)
708 {
709 case ERT_THROW:
710 /* Don't remove ERT_THROW regions if their outer region
711 is reachable. */
712 if (r->outer && reachable[r->outer->region_number])
713 kill_it = false;
714 break;
715
716 case ERT_MUST_NOT_THROW:
717 /* MUST_NOT_THROW regions are implementable solely in the
718 runtime, but their existence continues to affect calls
719 within that region. Never delete them here. */
720 kill_it = false;
721 break;
722
723 case ERT_TRY:
724 {
725 /* TRY regions are reachable if any of its CATCH regions
726 are reachable. */
727 struct eh_region *c;
728 for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
729 if (reachable[c->region_number])
730 {
731 kill_it = false;
732 break;
733 }
734 break;
735 }
736
737 default:
738 break;
739 }
740
741 if (kill_it)
742 remove_eh_handler (r);
743 }
744 }
745
746 free (reachable);
747 free (uid_region_num);
748 }
749
750 /* Set up EH labels for RTL. */
751
752 void
753 convert_from_eh_region_ranges (void)
754 {
755 rtx insns = get_insns ();
756 int i, n = cfun->eh->last_region_number;
757
758 /* Most of the work is already done at the tree level. All we need to
759 do is collect the rtl labels that correspond to the tree labels that
761 we allocated earlier. */
762 for (i = 1; i <= n; ++i)
763 {
764 struct eh_region *region;
765
766 region = VEC_index (eh_region, cfun->eh->region_array, i);
767 if (region && region->tree_label)
768 region->label = DECL_RTL_IF_SET (region->tree_label);
769 }
770
771 remove_unreachable_regions (insns);
772 }
773
774 static void
775 add_ehl_entry (rtx label, struct eh_region *region)
776 {
777 struct ehl_map_entry **slot, *entry;
778
779 LABEL_PRESERVE_P (label) = 1;
780
781 entry = ggc_alloc (sizeof (*entry));
782 entry->label = label;
783 entry->region = region;
784
785 slot = (struct ehl_map_entry **)
786 htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
787
788 /* Before landing pad creation, each exception handler has its own
789 label. After landing pad creation, the exception handlers may
790 share landing pads. This is ok, since maybe_remove_eh_handler
791 only requires the 1-1 mapping before landing pad creation. */
792 gcc_assert (!*slot || cfun->eh->built_landing_pads);
793
794 *slot = entry;
795 }
796
797 void
798 find_exception_handler_labels (void)
799 {
800 int i;
801
802 if (cfun->eh->exception_handler_label_map)
803 htab_empty (cfun->eh->exception_handler_label_map);
804 else
805 {
806 /* ??? The expansion factor here (3/2) must be greater than the htab
807 occupancy factor (4/3) to avoid unnecessary resizing. */
808 cfun->eh->exception_handler_label_map
809 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
810 ehl_hash, ehl_eq, NULL);
811 }
812
813 if (cfun->eh->region_tree == NULL)
814 return;
815
816 for (i = cfun->eh->last_region_number; i > 0; --i)
817 {
818 struct eh_region *region;
819 rtx lab;
820
821 region = VEC_index (eh_region, cfun->eh->region_array, i);
822 if (! region || region->region_number != i)
823 continue;
824 if (cfun->eh->built_landing_pads)
825 lab = region->landing_pad;
826 else
827 lab = region->label;
828
829 if (lab)
830 add_ehl_entry (lab, region);
831 }
832
833 /* For sjlj exceptions, we need the return label to remain live until
834 after landing pad generation. */
835 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
836 add_ehl_entry (return_label, NULL);
837 }
838
839 /* Returns true if the current function has exception handling regions. */
840
841 bool
842 current_function_has_exception_handlers (void)
843 {
844 int i;
845
846 for (i = cfun->eh->last_region_number; i > 0; --i)
847 {
848 struct eh_region *region;
849
850 region = VEC_index (eh_region, cfun->eh->region_array, i);
851 if (region
852 && region->region_number == i
853 && region->type != ERT_THROW)
854 return true;
855 }
856
857 return false;
858 }
859
860 /* A subroutine of duplicate_eh_regions. Search the region tree under O
861 for the minimum and maximum region numbers. Update *MIN and *MAX. */
862
863 static void
864 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
865 {
866 if (o->region_number < *min)
867 *min = o->region_number;
868 if (o->region_number > *max)
869 *max = o->region_number;
870
871 if (o->inner)
872 {
873 o = o->inner;
874 duplicate_eh_regions_0 (o, min, max);
875 while (o->next_peer)
876 {
877 o = o->next_peer;
878 duplicate_eh_regions_0 (o, min, max);
879 }
880 }
881 }
882
883 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
884 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
885 about the other internal pointers just yet, just the tree-like pointers. */
886
887 static eh_region
888 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
889 {
890 eh_region ret, n;
891
892 ret = n = ggc_alloc (sizeof (struct eh_region));
893
894 *n = *old;
895 n->outer = outer;
896 n->next_peer = NULL;
897 gcc_assert (!old->aka);
898
899 n->region_number += eh_offset;
900 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
901
902 if (old->inner)
903 {
904 old = old->inner;
905 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
906 while (old->next_peer)
907 {
908 old = old->next_peer;
909 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
910 }
911 }
912
913 return ret;
914 }
915
916 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
917 function and root the tree below OUTER_REGION. Remap labels using MAP
918 callback. The special case of COPY_REGION of 0 means all regions. */
919
920 int
921 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
922 void *data, int copy_region, int outer_region)
923 {
924 eh_region cur, prev_try, outer, *splice;
925 int i, min_region, max_region, eh_offset, cfun_last_region_number;
926 int num_regions;
927
928 if (!ifun->eh->region_tree)
929 return 0;
930
931 /* Find the range of region numbers to be copied. The interface we
932 provide here mandates a single offset to find new number from old,
933 which means we must look at the numbers present, instead of the
934 count or something else. */
935 if (copy_region > 0)
936 {
937 min_region = INT_MAX;
938 max_region = 0;
939
940 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
941 duplicate_eh_regions_0 (cur, &min_region, &max_region);
942 }
943 else
944 min_region = 1, max_region = ifun->eh->last_region_number;
945 num_regions = max_region - min_region + 1;
946 cfun_last_region_number = cfun->eh->last_region_number;
947 eh_offset = cfun_last_region_number + 1 - min_region;
948
949 /* If we've not yet created a region array, do so now. */
950 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
951 cfun_last_region_number + 1 + num_regions);
952 cfun->eh->last_region_number = max_region + eh_offset;
953
954 /* We may have just allocated the array for the first time.
955 Make sure that element zero is null. */
956 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
957
958 /* Zero all entries in the range allocated. */
959 memset (VEC_address (eh_region, cfun->eh->region_array)
960 + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region));
961
962 /* Locate the spot at which to insert the new tree. */
963 if (outer_region > 0)
964 {
965 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
966 splice = &outer->inner;
967 }
968 else
969 {
970 outer = NULL;
971 splice = &cfun->eh->region_tree;
972 }
973 while (*splice)
974 splice = &(*splice)->next_peer;
975
976 /* Copy all the regions in the subtree. */
977 if (copy_region > 0)
978 {
979 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
980 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
981 }
982 else
983 {
984 eh_region n;
985
986 cur = ifun->eh->region_tree;
987 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
988 while (cur->next_peer)
989 {
990 cur = cur->next_peer;
991 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
992 }
993 }
994
995 /* Remap all the labels in the new regions. */
996 for (i = cfun_last_region_number + 1;
997 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
998 if (cur && cur->tree_label)
999 cur->tree_label = map (cur->tree_label, data);
1000
1001 /* Search for the containing ERT_TRY region to fix up
1002 the prev_try short-cuts for ERT_CLEANUP regions. */
1003 prev_try = NULL;
1004 if (outer_region > 0)
1005 for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1006 prev_try && prev_try->type != ERT_TRY;
1007 prev_try = prev_try->outer)
1008 if (prev_try->type == ERT_MUST_NOT_THROW)
1009 {
1010 prev_try = NULL;
1011 break;
1012 }
1013
1014 /* Remap all of the internal catch and cleanup linkages. Since we
1015 duplicate entire subtrees, all of the referenced regions will have
1016 been copied too. And since we renumbered them as a block, a simple
1017 bit of arithmetic finds us the index for the replacement region. */
1018 for (i = cfun_last_region_number + 1;
1019 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1020 {
1021 if (cur == NULL)
1022 continue;
1023
1024 #define REMAP(REG) \
1025 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1026 (REG)->region_number + eh_offset)
1027
1028 switch (cur->type)
1029 {
1030 case ERT_TRY:
1031 if (cur->u.try.catch)
1032 REMAP (cur->u.try.catch);
1033 if (cur->u.try.last_catch)
1034 REMAP (cur->u.try.last_catch);
1035 break;
1036
1037 case ERT_CATCH:
1038 if (cur->u.catch.next_catch)
1039 REMAP (cur->u.catch.next_catch);
1040 if (cur->u.catch.prev_catch)
1041 REMAP (cur->u.catch.prev_catch);
1042 break;
1043
1044 case ERT_CLEANUP:
1045 if (cur->u.cleanup.prev_try)
1046 REMAP (cur->u.cleanup.prev_try);
1047 else
1048 cur->u.cleanup.prev_try = prev_try;
1049 break;
1050
1051 default:
1052 break;
1053 }
1054
1055 #undef REMAP
1056 }
1057
1058 return eh_offset;
1059 }
1060
1061 /* Return true if REGION_A is outer to REGION_B in IFUN. */
1062
1063 bool
1064 eh_region_outer_p (struct function *ifun, int region_a, int region_b)
1065 {
1066 struct eh_region *rp_a, *rp_b;
1067
1068 gcc_assert (ifun->eh->last_region_number > 0);
1069 gcc_assert (ifun->eh->region_tree);
1070
1071 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1072 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1073 gcc_assert (rp_a != NULL);
1074 gcc_assert (rp_b != NULL);
1075
1076 do
1077 {
1078 if (rp_a == rp_b)
1079 return true;
1080 rp_b = rp_b->outer;
1081 }
1082 while (rp_b);
1083
1084 return false;
1085 }
1086
1087 /* Return the region number of the region that is outer to both REGION_A
1088 and REGION_B in IFUN. */
1089
1090 int
1091 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1092 {
1093 struct eh_region *rp_a, *rp_b;
1094 sbitmap b_outer;
1095
1096 gcc_assert (ifun->eh->last_region_number > 0);
1097 gcc_assert (ifun->eh->region_tree);
1098
1099 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1100 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1101 gcc_assert (rp_a != NULL);
1102 gcc_assert (rp_b != NULL);
1103
1104 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1105 sbitmap_zero (b_outer);
1106
1107 do
1108 {
1109 SET_BIT (b_outer, rp_b->region_number);
1110 rp_b = rp_b->outer;
1111 }
1112 while (rp_b);
1113
1114 do
1115 {
1116 if (TEST_BIT (b_outer, rp_a->region_number))
1117 {
1118 sbitmap_free (b_outer);
1119 return rp_a->region_number;
1120 }
1121 rp_a = rp_a->outer;
1122 }
1123 while (rp_a);
1124
1125 sbitmap_free (b_outer);
1126 return -1;
1127 }
1128
1129 static int
1130 t2r_eq (const void *pentry, const void *pdata)
1131 {
1132 tree entry = (tree) pentry;
1133 tree data = (tree) pdata;
1134
1135 return TREE_PURPOSE (entry) == data;
1136 }
1137
1138 static hashval_t
1139 t2r_hash (const void *pentry)
1140 {
1141 tree entry = (tree) pentry;
1142 return TREE_HASH (TREE_PURPOSE (entry));
1143 }
1144
1145 static void
1146 add_type_for_runtime (tree type)
1147 {
1148 tree *slot;
1149
1150 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1151 TREE_HASH (type), INSERT);
1152 if (*slot == NULL)
1153 {
1154 tree runtime = (*lang_eh_runtime_type) (type);
1155 *slot = tree_cons (type, runtime, NULL_TREE);
1156 }
1157 }
1158
1159 static tree
1160 lookup_type_for_runtime (tree type)
1161 {
1162 tree *slot;
1163
1164 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1165 TREE_HASH (type), NO_INSERT);
1166
1167 /* We should have always inserted the data earlier. */
1168 return TREE_VALUE (*slot);
1169 }
1170
1171
1172 /* Represent an entry in @TTypes for either catch actions
1173 or exception filter actions. */
1174 struct ttypes_filter GTY(())
1175 {
1176 tree t;
1177 int filter;
1178 };
1179
1180 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1181 (a tree) for a @TTypes type node we are thinking about adding. */
1182
1183 static int
1184 ttypes_filter_eq (const void *pentry, const void *pdata)
1185 {
1186 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1187 tree data = (tree) pdata;
1188
1189 return entry->t == data;
1190 }
1191
1192 static hashval_t
1193 ttypes_filter_hash (const void *pentry)
1194 {
1195 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1196 return TREE_HASH (entry->t);
1197 }
1198
1199 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1200 exception specification list we are thinking about adding. */
1201 /* ??? Currently we use the type lists in the order given. Someone
1202 should put these in some canonical order. */
1203
1204 static int
1205 ehspec_filter_eq (const void *pentry, const void *pdata)
1206 {
1207 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1208 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1209
1210 return type_list_equal (entry->t, data->t);
1211 }
1212
1213 /* Hash function for exception specification lists. */
1214
1215 static hashval_t
1216 ehspec_filter_hash (const void *pentry)
1217 {
1218 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1219 hashval_t h = 0;
1220 tree list;
1221
1222 for (list = entry->t; list ; list = TREE_CHAIN (list))
1223 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1224 return h;
1225 }
1226
1227 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
1228 to speed up the search. Return the filter value to be used. */
1229
1230 static int
1231 add_ttypes_entry (htab_t ttypes_hash, tree type)
1232 {
1233 struct ttypes_filter **slot, *n;
1234
1235 slot = (struct ttypes_filter **)
1236 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1237
1238 if ((n = *slot) == NULL)
1239 {
1240 /* Filter value is a 1 based table index. */
1241
1242 n = XNEW (struct ttypes_filter);
1243 n->t = type;
1244 n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
1245 *slot = n;
1246
1247 VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
1248 }
1249
1250 return n->filter;
1251 }
1252
1253 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1254 to speed up the search. Return the filter value to be used. */
1255
1256 static int
1257 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1258 {
1259 struct ttypes_filter **slot, *n;
1260 struct ttypes_filter dummy;
1261
1262 dummy.t = list;
1263 slot = (struct ttypes_filter **)
1264 htab_find_slot (ehspec_hash, &dummy, INSERT);
1265
1266 if ((n = *slot) == NULL)
1267 {
1268 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1269
1270 n = XNEW (struct ttypes_filter);
1271 n->t = list;
1272 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1273 *slot = n;
1274
1275 /* Generate a 0 terminated list of filter values. */
1276 for (; list ; list = TREE_CHAIN (list))
1277 {
1278 if (targetm.arm_eabi_unwinder)
1279 VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
1280 else
1281 {
1282 /* Look up each type in the list and encode its filter
1283 value as a uleb128. */
1284 push_uleb128 (&cfun->eh->ehspec_data,
1285 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1286 }
1287 }
1288 if (targetm.arm_eabi_unwinder)
1289 VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
1290 else
1291 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1292 }
1293
1294 return n->filter;
1295 }
1296
1297 /* Generate the action filter values to be used for CATCH and
1298 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1299 we use lots of landing pads, and so every type or list can share
1300 the same filter value, which saves table space. */
1301
1302 static void
1303 assign_filter_values (void)
1304 {
1305 int i;
1306 htab_t ttypes, ehspec;
1307
1308 cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
1309 if (targetm.arm_eabi_unwinder)
1310 VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1311 else
1312 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1313
1314 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1315 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1316
1317 for (i = cfun->eh->last_region_number; i > 0; --i)
1318 {
1319 struct eh_region *r;
1320
1321 r = VEC_index (eh_region, cfun->eh->region_array, i);
1322
1323 /* Mind we don't process a region more than once. */
1324 if (!r || r->region_number != i)
1325 continue;
1326
1327 switch (r->type)
1328 {
1329 case ERT_CATCH:
1330 /* Whatever type_list is (NULL or true list), we build a list
1331 of filters for the region. */
1332 r->u.catch.filter_list = NULL_TREE;
1333
1334 if (r->u.catch.type_list != NULL)
1335 {
1336 /* Get a filter value for each of the types caught and store
1337 them in the region's dedicated list. */
1338 tree tp_node = r->u.catch.type_list;
1339
1340 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1341 {
1342 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1343 tree flt_node = build_int_cst (NULL_TREE, flt);
1344
1345 r->u.catch.filter_list
1346 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1347 }
1348 }
1349 else
1350 {
1351 /* Get a filter value for the NULL list also since it will need
1352 an action record anyway. */
1353 int flt = add_ttypes_entry (ttypes, NULL);
1354 tree flt_node = build_int_cst (NULL_TREE, flt);
1355
1356 r->u.catch.filter_list
1357 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1358 }
1359
1360 break;
1361
1362 case ERT_ALLOWED_EXCEPTIONS:
1363 r->u.allowed.filter
1364 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1365 break;
1366
1367 default:
1368 break;
1369 }
1370 }
1371
1372 htab_delete (ttypes);
1373 htab_delete (ehspec);
1374 }
1375
1376 /* Emit SEQ into a basic block just before INSN (which is assumed to be
1377 the first instruction of some existing BB) and return the newly
1378 produced block. */
1379 static basic_block
1380 emit_to_new_bb_before (rtx seq, rtx insn)
1381 {
1382 rtx last;
1383 basic_block bb;
1384 edge e;
1385 edge_iterator ei;
1386
1387 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1388 call), we don't want it to go into the newly created landing pad or other EH
1389 construct. */
1390 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1391 if (e->flags & EDGE_FALLTHRU)
1392 force_nonfallthru (e);
1393 else
1394 ei_next (&ei);
1395 last = emit_insn_before (seq, insn);
1396 if (BARRIER_P (last))
1397 last = PREV_INSN (last);
1398 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1399 update_bb_for_insn (bb);
1400 bb->flags |= BB_SUPERBLOCK;
1401 return bb;
1402 }
1403
1404 /* Generate the code to actually handle exceptions, which will follow the
1405 landing pads. */
1406
1407 static void
1408 build_post_landing_pads (void)
1409 {
1410 int i;
1411
1412 for (i = cfun->eh->last_region_number; i > 0; --i)
1413 {
1414 struct eh_region *region;
1415 rtx seq;
1416
1417 region = VEC_index (eh_region, cfun->eh->region_array, i);
1418 /* Mind we don't process a region more than once. */
1419 if (!region || region->region_number != i)
1420 continue;
1421
1422 switch (region->type)
1423 {
1424 case ERT_TRY:
1425 /* ??? Collect the set of all non-overlapping catch handlers
1426 all the way up the chain until blocked by a cleanup. */
1427 /* ??? Outer try regions can share landing pads with inner
1428 try regions if the types are completely non-overlapping,
1429 and there are no intervening cleanups. */
1430
1431 region->post_landing_pad = gen_label_rtx ();
1432
1433 start_sequence ();
1434
1435 emit_label (region->post_landing_pad);
1436
1437 /* ??? It is mighty inconvenient to call back into the
1438 switch statement generation code in expand_end_case.
1439 Rapid prototyping sez a sequence of ifs. */
1440 {
1441 struct eh_region *c;
1442 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1443 {
1444 if (c->u.catch.type_list == NULL)
1445 emit_jump (c->label);
1446 else
1447 {
1448 /* We need one cmp/jump per type caught. Each type
1449 list entry has a matching entry in the filter list
1450 (see assign_filter_values). */
1451 tree tp_node = c->u.catch.type_list;
1452 tree flt_node = c->u.catch.filter_list;
1453
1454 for (; tp_node; )
1455 {
1456 emit_cmp_and_jump_insns
1457 (cfun->eh->filter,
1458 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1459 EQ, NULL_RTX,
1460 targetm.eh_return_filter_mode (), 0, c->label);
1461
1462 tp_node = TREE_CHAIN (tp_node);
1463 flt_node = TREE_CHAIN (flt_node);
1464 }
1465 }
1466 }
1467 }
1468
1469 /* We delay the generation of the _Unwind_Resume until we generate
1470 landing pads. We emit a marker here so as to get good control
1471 flow data in the meantime. */
1472 region->resume
1473 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1474 emit_barrier ();
1475
1476 seq = get_insns ();
1477 end_sequence ();
1478
1479 emit_to_new_bb_before (seq, region->u.try.catch->label);
1480
1481 break;
1482
1483 case ERT_ALLOWED_EXCEPTIONS:
1484 region->post_landing_pad = gen_label_rtx ();
1485
1486 start_sequence ();
1487
1488 emit_label (region->post_landing_pad);
1489
1490 emit_cmp_and_jump_insns (cfun->eh->filter,
1491 GEN_INT (region->u.allowed.filter),
1492 EQ, NULL_RTX,
1493 targetm.eh_return_filter_mode (), 0, region->label);
1494
1495 /* We delay the generation of the _Unwind_Resume until we generate
1496 landing pads. We emit a marker here so as to get good control
1497 flow data in the meantime. */
1498 region->resume
1499 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1500 emit_barrier ();
1501
1502 seq = get_insns ();
1503 end_sequence ();
1504
1505 emit_to_new_bb_before (seq, region->label);
1506 break;
1507
1508 case ERT_CLEANUP:
1509 case ERT_MUST_NOT_THROW:
1510 region->post_landing_pad = region->label;
1511 break;
1512
1513 case ERT_CATCH:
1514 case ERT_THROW:
1515 /* Nothing to do. */
1516 break;
1517
1518 default:
1519 gcc_unreachable ();
1520 }
1521 }
1522 }
1523
1524 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1525 _Unwind_Resume otherwise. */
1526
1527 static void
1528 connect_post_landing_pads (void)
1529 {
1530 int i;
1531
1532 for (i = cfun->eh->last_region_number; i > 0; --i)
1533 {
1534 struct eh_region *region;
1535 struct eh_region *outer;
1536 rtx seq;
1537 rtx barrier;
1538
1539 region = VEC_index (eh_region, cfun->eh->region_array, i);
1540 /* Mind we don't process a region more than once. */
1541 if (!region || region->region_number != i)
1542 continue;
1543
1544 /* If there is no RESX, or it has been deleted by flow, there's
1545 nothing to fix up. */
1546 if (! region->resume || INSN_DELETED_P (region->resume))
1547 continue;
1548
1549 /* Search for another landing pad in this function. */
1550 for (outer = region->outer; outer ; outer = outer->outer)
1551 if (outer->post_landing_pad)
1552 break;
1553
1554 start_sequence ();
1555
1556 if (outer)
1557 {
1558 edge e;
1559 basic_block src, dest;
1560
1561 emit_jump (outer->post_landing_pad);
1562 src = BLOCK_FOR_INSN (region->resume);
1563 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1564 while (EDGE_COUNT (src->succs) > 0)
1565 remove_edge (EDGE_SUCC (src, 0));
1566 e = make_edge (src, dest, 0);
1567 e->probability = REG_BR_PROB_BASE;
1568 e->count = src->count;
1569 }
1570 else
1571 {
1572 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1573 VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
1574
1575 /* What we just emitted was a throwing libcall, so it got a
1576 barrier automatically added after it. If the last insn in
1577 the libcall sequence isn't the barrier, it's because the
1578 target emits multiple insns for a call, and there are insns
1579 after the actual call insn (which are redundant and would be
1580 optimized away). The barrier is inserted exactly after the
1581 call insn, so let's go get that and delete the insns after
1582 it, because below we need the barrier to be the last insn in
1583 the sequence. */
1584 delete_insns_since (NEXT_INSN (last_call_insn ()));
1585 }
1586
1587 seq = get_insns ();
1588 end_sequence ();
1589 barrier = emit_insn_before (seq, region->resume);
1590 /* Avoid duplicate barrier. */
1591 gcc_assert (BARRIER_P (barrier));
1592 delete_insn (barrier);
1593 delete_insn (region->resume);
1594
1595 /* ??? From tree-ssa we can wind up with catch regions whose
1596 label is not instantiated, but whose resx is present. Now
1597 that we've dealt with the resx, kill the region. */
1598 if (region->label == NULL && region->type == ERT_CLEANUP)
1599 remove_eh_handler (region);
1600 }
1601 }
1602
1603
1604 static void
1605 dw2_build_landing_pads (void)
1606 {
1607 int i;
1608
1609 for (i = cfun->eh->last_region_number; i > 0; --i)
1610 {
1611 struct eh_region *region;
1612 rtx seq;
1613 basic_block bb;
1614 edge e;
1615
1616 region = VEC_index (eh_region, cfun->eh->region_array, i);
1617 /* Mind we don't process a region more than once. */
1618 if (!region || region->region_number != i)
1619 continue;
1620
1621 if (region->type != ERT_CLEANUP
1622 && region->type != ERT_TRY
1623 && region->type != ERT_ALLOWED_EXCEPTIONS)
1624 continue;
1625
1626 start_sequence ();
1627
1628 region->landing_pad = gen_label_rtx ();
1629 emit_label (region->landing_pad);
1630
1631 #ifdef HAVE_exception_receiver
1632 if (HAVE_exception_receiver)
1633 emit_insn (gen_exception_receiver ());
1634 else
1635 #endif
1636 #ifdef HAVE_nonlocal_goto_receiver
1637 if (HAVE_nonlocal_goto_receiver)
1638 emit_insn (gen_nonlocal_goto_receiver ());
1639 else
1640 #endif
1641 { /* Nothing */ }
1642
1643 emit_move_insn (cfun->eh->exc_ptr,
1644 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1645 emit_move_insn (cfun->eh->filter,
1646 gen_rtx_REG (targetm.eh_return_filter_mode (),
1647 EH_RETURN_DATA_REGNO (1)));
1648
1649 seq = get_insns ();
1650 end_sequence ();
1651
1652 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1653 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1654 e->count = bb->count;
1655 e->probability = REG_BR_PROB_BASE;
1656 }
1657 }
1658
1659
1660 struct sjlj_lp_info
1661 {
1662 int directly_reachable;
1663 int action_index;
1664 int dispatch_index;
1665 int call_site_index;
1666 };
1667
1668 static bool
1669 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1670 {
1671 rtx insn;
1672 bool found_one = false;
1673
1674 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1675 {
1676 struct eh_region *region;
1677 enum reachable_code rc;
1678 tree type_thrown;
1679 rtx note;
1680
1681 if (! INSN_P (insn))
1682 continue;
1683
1684 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1685 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1686 continue;
1687
1688 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1689
1690 type_thrown = NULL_TREE;
1691 if (region->type == ERT_THROW)
1692 {
1693 type_thrown = region->u.throw.type;
1694 region = region->outer;
1695 }
1696
1697 /* Find the first containing region that might handle the exception.
1698 That's the landing pad to which we will transfer control. */
1699 rc = RNL_NOT_CAUGHT;
1700 for (; region; region = region->outer)
1701 {
1702 rc = reachable_next_level (region, type_thrown, NULL);
1703 if (rc != RNL_NOT_CAUGHT)
1704 break;
1705 }
1706 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1707 {
1708 lp_info[region->region_number].directly_reachable = 1;
1709 found_one = true;
1710 }
1711 }
1712
1713 return found_one;
1714 }
1715
1716 static void
1717 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1718 {
1719 htab_t ar_hash;
1720 int i, index;
1721
1722 /* First task: build the action table. */
1723
1724 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1725 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1726
1727 for (i = cfun->eh->last_region_number; i > 0; --i)
1728 if (lp_info[i].directly_reachable)
1729 {
1730 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
1731
1732 r->landing_pad = dispatch_label;
1733 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1734 if (lp_info[i].action_index != -1)
1735 cfun->uses_eh_lsda = 1;
1736 }
1737
1738 htab_delete (ar_hash);
1739
1740 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1741 landing pad label for the region. For sjlj though, there is one
1742 common landing pad from which we dispatch to the post-landing pads.
1743
1744 A region receives a dispatch index if it is directly reachable
1745 and requires in-function processing. Regions that share post-landing
1746 pads may share dispatch indices. */
1747 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1748 (see build_post_landing_pads) so we don't bother checking for it. */
1749
1750 index = 0;
1751 for (i = cfun->eh->last_region_number; i > 0; --i)
1752 if (lp_info[i].directly_reachable)
1753 lp_info[i].dispatch_index = index++;
1754
1755 /* Finally: assign call-site values. In dwarf2 terms, this would be
1756 the region number assigned by convert_to_eh_region_ranges, but
1757 here we handle no-action and must-not-throw differently. */
1758
1759 call_site_base = 1;
1760 for (i = cfun->eh->last_region_number; i > 0; --i)
1761 if (lp_info[i].directly_reachable)
1762 {
1763 int action = lp_info[i].action_index;
1764
1765 /* Map must-not-throw to otherwise unused call-site index 0. */
1766 if (action == -2)
1767 index = 0;
1768 /* Map no-action to otherwise unused call-site index -1. */
1769 else if (action == -1)
1770 index = -1;
1771 /* Otherwise, look it up in the table. */
1772 else
1773 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1774
1775 lp_info[i].call_site_index = index;
1776 }
1777 }
1778
1779 static void
1780 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1781 {
1782 int last_call_site = -2;
1783 rtx insn, mem;
1784
1785 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1786 {
1787 struct eh_region *region;
1788 int this_call_site;
1789 rtx note, before, p;
1790
1791 /* Reset value tracking at extended basic block boundaries. */
1792 if (LABEL_P (insn))
1793 last_call_site = -2;
1794
1795 if (! INSN_P (insn))
1796 continue;
1797
1798 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1799 if (!note)
1800 {
1801 /* Calls (and trapping insns) without notes are outside any
1802 exception handling region in this function. Mark them as
1803 no action. */
1804 if (CALL_P (insn)
1805 || (flag_non_call_exceptions
1806 && may_trap_p (PATTERN (insn))))
1807 this_call_site = -1;
1808 else
1809 continue;
1810 }
1811 else
1812 {
1813 /* Calls that are known to not throw need not be marked. */
1814 if (INTVAL (XEXP (note, 0)) <= 0)
1815 continue;
1816
1817 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1818 this_call_site = lp_info[region->region_number].call_site_index;
1819 }
1820
1821 if (this_call_site == last_call_site)
1822 continue;
1823
1824 /* Don't separate a call from its argument loads. */
1825 before = insn;
1826 if (CALL_P (insn))
1827 before = find_first_parameter_load (insn, NULL_RTX);
1828
1829 start_sequence ();
1830 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
1831 sjlj_fc_call_site_ofs);
1832 emit_move_insn (mem, GEN_INT (this_call_site));
1833 p = get_insns ();
1834 end_sequence ();
1835
1836 emit_insn_before (p, before);
1837 last_call_site = this_call_site;
1838 }
1839 }
1840
1841 /* Construct the SjLj_Function_Context. */
1842
1843 static void
1844 sjlj_emit_function_enter (rtx dispatch_label)
1845 {
1846 rtx fn_begin, fc, mem, seq;
1847 bool fn_begin_outside_block;
1848
1849 fc = cfun->eh->sjlj_fc;
1850
1851 start_sequence ();
1852
1853 /* We're storing this libcall's address into memory instead of
1854 calling it directly. Thus, we must call assemble_external_libcall
1855 here, as we can not depend on emit_library_call to do it for us. */
1856 assemble_external_libcall (eh_personality_libfunc);
1857 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1858 emit_move_insn (mem, eh_personality_libfunc);
1859
1860 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1861 if (cfun->uses_eh_lsda)
1862 {
1863 char buf[20];
1864 rtx sym;
1865
1866 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1867 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1868 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1869 emit_move_insn (mem, sym);
1870 }
1871 else
1872 emit_move_insn (mem, const0_rtx);
1873
1874 #ifdef DONT_USE_BUILTIN_SETJMP
1875 {
1876 rtx x, note;
1877 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
1878 TYPE_MODE (integer_type_node), 1,
1879 plus_constant (XEXP (fc, 0),
1880 sjlj_fc_jbuf_ofs), Pmode);
1881
1882 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
1883 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
1884
1885 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1886 TYPE_MODE (integer_type_node), 0, dispatch_label);
1887 }
1888 #else
1889 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
1890 dispatch_label);
1891 #endif
1892
1893 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1894 1, XEXP (fc, 0), Pmode);
1895
1896 seq = get_insns ();
1897 end_sequence ();
1898
1899 /* ??? Instead of doing this at the beginning of the function,
1900 do this in a block that is at loop level 0 and dominates all
1901 can_throw_internal instructions. */
1902
1903 fn_begin_outside_block = true;
1904 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1905 if (NOTE_P (fn_begin))
1906 {
1907 if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1908 break;
1909 else if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)
1910 fn_begin_outside_block = false;
1911 }
1912
1913 if (fn_begin_outside_block)
1914 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
1915 else
1916 emit_insn_after (seq, fn_begin);
1917 }
1918
1919 /* Call back from expand_function_end to know where we should put
1920 the call to unwind_sjlj_unregister_libfunc if needed. */
1921
1922 void
1923 sjlj_emit_function_exit_after (rtx after)
1924 {
1925 cfun->eh->sjlj_exit_after = after;
1926 }
1927
1928 static void
1929 sjlj_emit_function_exit (void)
1930 {
1931 rtx seq;
1932 edge e;
1933 edge_iterator ei;
1934
1935 start_sequence ();
1936
1937 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1938 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
1939
1940 seq = get_insns ();
1941 end_sequence ();
1942
1943 /* ??? Really this can be done in any block at loop level 0 that
1944 post-dominates all can_throw_internal instructions. This is
1945 the last possible moment. */
1946
1947 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1948 if (e->flags & EDGE_FALLTHRU)
1949 break;
1950 if (e)
1951 {
1952 rtx insn;
1953
1954       /* Figure out whether the place where we are supposed to insert the
1955	 libcall is inside the last basic block or after it.  In the latter
1956	 case we need to emit the sequence onto the edge.  */
1957 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
1958 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
1959 {
1960 if (insn == cfun->eh->sjlj_exit_after)
1961 {
1962 if (LABEL_P (insn))
1963 insn = NEXT_INSN (insn);
1964 emit_insn_after (seq, insn);
1965 return;
1966 }
1967 if (insn == BB_END (e->src))
1968 break;
1969 }
1970 insert_insn_on_edge (seq, e);
1971 }
1972 }
1973
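/* Emit the dispatch code reached after a longjmp back into this function:
   reload the call-site index, exception pointer and filter from the
   function context, then branch to the post-landing pad of the matching
   directly reachable region.  */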
1974 static void
1975 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1976 {
1977 int i, first_reachable;
1978 rtx mem, dispatch, seq, fc;
1979 rtx before;
1980 basic_block bb;
1981 edge e;
1982
1983 fc = cfun->eh->sjlj_fc;
1984
1985 start_sequence ();
1986
1987 emit_label (dispatch_label);
1988
1989 #ifndef DONT_USE_BUILTIN_SETJMP
1990 expand_builtin_setjmp_receiver (dispatch_label);
1991 #endif
1992
1993 /* Load up dispatch index, exc_ptr and filter values from the
1994 function context. */
1995 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
1996 sjlj_fc_call_site_ofs);
1997 dispatch = copy_to_reg (mem);
1998
1999 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2000 if (word_mode != ptr_mode)
2001 {
2002 #ifdef POINTERS_EXTEND_UNSIGNED
2003 mem = convert_memory_address (ptr_mode, mem);
2004 #else
2005 mem = convert_to_mode (ptr_mode, mem, 0);
2006 #endif
2007 }
2008 emit_move_insn (cfun->eh->exc_ptr, mem);
2009
2010 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2011 emit_move_insn (cfun->eh->filter, mem);
2012
2013 /* Jump to one of the directly reachable regions. */
2014 /* ??? This really ought to be using a switch statement. */
2015
2016 first_reachable = 0;
2017 for (i = cfun->eh->last_region_number; i > 0; --i)
2018 {
2019 if (! lp_info[i].directly_reachable)
2020 continue;
2021
2022 if (! first_reachable)
2023 {
2024 first_reachable = i;
2025 continue;
2026 }
2027
2028 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2029 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2030 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2031 ->post_landing_pad);
2032 }
2033
2034 seq = get_insns ();
2035 end_sequence ();
2036
2037 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2038 ->post_landing_pad);
2039
2040 bb = emit_to_new_bb_before (seq, before);
2041 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2042 e->count = bb->count;
2043 e->probability = REG_BR_PROB_BASE;
2044 }
2045
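/* Top-level driver for sjlj exception handling: allocate the function
   context, assign call-site values, mark call sites, and emit the
   function enter/exit code and the dispatch table.  */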
2046 static void
2047 sjlj_build_landing_pads (void)
2048 {
2049 struct sjlj_lp_info *lp_info;
2050
2051 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2052
2053 if (sjlj_find_directly_reachable_regions (lp_info))
2054 {
2055 rtx dispatch_label = gen_label_rtx ();
2056
2057 cfun->eh->sjlj_fc
2058 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2059 int_size_in_bytes (sjlj_fc_type_node),
2060 TYPE_ALIGN (sjlj_fc_type_node));
2061
2062 sjlj_assign_call_site_values (dispatch_label, lp_info);
2063 sjlj_mark_call_sites (lp_info);
2064
2065 sjlj_emit_function_enter (dispatch_label);
2066 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2067 sjlj_emit_function_exit ();
2068 }
2069
2070 free (lp_info);
2071 }
2072
2073 void
2074 finish_eh_generation (void)
2075 {
2076 basic_block bb;
2077
2078 /* Nothing to do if no regions created. */
2079 if (cfun->eh->region_tree == NULL)
2080 return;
2081
2082 /* The object here is to provide find_basic_blocks with detailed
2083 information (via reachable_handlers) on how exception control
2084 flows within the function. In this first pass, we can include
2085 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2086 regions, and hope that it will be useful in deleting unreachable
2087 handlers. Subsequently, we will generate landing pads which will
2088 connect many of the handlers, and then type information will not
2089 be effective. Still, this is a win over previous implementations. */
2090
2091 /* These registers are used by the landing pads. Make sure they
2092 have been generated. */
2093 get_exception_pointer (cfun);
2094 get_exception_filter (cfun);
2095
2096 /* Construct the landing pads. */
2097
2098 assign_filter_values ();
2099 build_post_landing_pads ();
2100 connect_post_landing_pads ();
2101 if (USING_SJLJ_EXCEPTIONS)
2102 sjlj_build_landing_pads ();
2103 else
2104 dw2_build_landing_pads ();
2105
2106 cfun->eh->built_landing_pads = 1;
2107
2108 /* We've totally changed the CFG. Start over. */
2109 find_exception_handler_labels ();
2110 break_superblocks ();
2111 if (USING_SJLJ_EXCEPTIONS)
2112 commit_edge_insertions ();
2113 FOR_EACH_BB (bb)
2114 {
2115 edge e;
2116 edge_iterator ei;
2117 bool eh = false;
2118 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2119 {
2120 if (e->flags & EDGE_EH)
2121 {
2122 remove_edge (e);
2123 eh = true;
2124 }
2125 else
2126 ei_next (&ei);
2127 }
2128 if (eh)
2129 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2130 }
2131 }
2132
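/* Hash and equality functions for exception_handler_label_map, keyed by
   the CODE_LABEL_NUMBER of the handler label.  */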
2133 static hashval_t
2134 ehl_hash (const void *pentry)
2135 {
2136 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2137
2138 /* 2^32 * ((sqrt(5) - 1) / 2) */
2139 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2140 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2141 }
2142
2143 static int
2144 ehl_eq (const void *pentry, const void *pdata)
2145 {
2146 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2147 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2148
2149 return entry->label == data->label;
2150 }
2151
2152 /* This section handles removing dead code for flow. */
2153
2154 /* Remove LABEL from exception_handler_label_map. */
2155
2156 static void
2157 remove_exception_handler_label (rtx label)
2158 {
2159 struct ehl_map_entry **slot, tmp;
2160
2161 /* If exception_handler_label_map was not built yet,
2162 there is nothing to do. */
2163 if (cfun->eh->exception_handler_label_map == NULL)
2164 return;
2165
2166 tmp.label = label;
2167 slot = (struct ehl_map_entry **)
2168 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2169 gcc_assert (slot);
2170
2171 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2172 }
2173
2174 /* Splice REGION from the region tree etc. */
2175
2176 static void
2177 remove_eh_handler (struct eh_region *region)
2178 {
2179 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2180 rtx lab;
2181
2182 /* For the benefit of efficiently handling REG_EH_REGION notes,
2183 replace this region in the region array with its containing
2184 region. Note that previous region deletions may result in
2185 multiple copies of this region in the array, so we have a
2186 list of alternate numbers by which we are known. */
2187
2188 outer = region->outer;
2189 VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
2190 if (region->aka)
2191 {
2192 unsigned i;
2193 bitmap_iterator bi;
2194
2195 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2196 {
2197 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
2198 }
2199 }
2200
2201 if (outer)
2202 {
2203 if (!outer->aka)
2204 outer->aka = BITMAP_GGC_ALLOC ();
2205 if (region->aka)
2206 bitmap_ior_into (outer->aka, region->aka);
2207 bitmap_set_bit (outer->aka, region->region_number);
2208 }
2209
2210 if (cfun->eh->built_landing_pads)
2211 lab = region->landing_pad;
2212 else
2213 lab = region->label;
2214 if (lab)
2215 remove_exception_handler_label (lab);
2216
2217 if (outer)
2218 pp_start = &outer->inner;
2219 else
2220 pp_start = &cfun->eh->region_tree;
2221 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2222 continue;
2223 *pp = region->next_peer;
2224
2225 inner = region->inner;
2226 if (inner)
2227 {
2228 for (p = inner; p->next_peer ; p = p->next_peer)
2229 p->outer = outer;
2230 p->outer = outer;
2231
2232 p->next_peer = *pp_start;
2233 *pp_start = inner;
2234 }
2235
2236 if (region->type == ERT_CATCH)
2237 {
2238 struct eh_region *try, *next, *prev;
2239
2240 for (try = region->next_peer;
2241 try->type == ERT_CATCH;
2242 try = try->next_peer)
2243 continue;
2244 gcc_assert (try->type == ERT_TRY);
2245
2246 next = region->u.catch.next_catch;
2247 prev = region->u.catch.prev_catch;
2248
2249 if (next)
2250 next->u.catch.prev_catch = prev;
2251 else
2252 try->u.try.last_catch = prev;
2253 if (prev)
2254 prev->u.catch.next_catch = next;
2255 else
2256 {
2257 try->u.try.catch = next;
2258 if (! next)
2259 remove_eh_handler (try);
2260 }
2261 }
2262 }
2263
2264 /* LABEL heads a basic block that is about to be deleted. If this
2265 label corresponds to an exception region, we may be able to
2266 delete the region. */
2267
2268 void
2269 maybe_remove_eh_handler (rtx label)
2270 {
2271 struct ehl_map_entry **slot, tmp;
2272 struct eh_region *region;
2273
2274 /* ??? After generating landing pads, it's not so simple to determine
2275 if the region data is completely unused. One must examine the
2276 landing pad and the post landing pad, and whether an inner try block
2277 is referencing the catch handlers directly. */
2278 if (cfun->eh->built_landing_pads)
2279 return;
2280
2281 tmp.label = label;
2282 slot = (struct ehl_map_entry **)
2283 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2284 if (! slot)
2285 return;
2286 region = (*slot)->region;
2287 if (! region)
2288 return;
2289
2290 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2291 because there is no path to the fallback call to terminate.
2292 But the region continues to affect call-site data until there
2293 are no more contained calls, which we don't see here. */
2294 if (region->type == ERT_MUST_NOT_THROW)
2295 {
2296 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2297 region->label = NULL_RTX;
2298 }
2299 else
2300 remove_eh_handler (region);
2301 }
2302
2303 /* Invokes CALLBACK for every exception handler label. Only used by old
2304 loop hackery; should not be used by new code. */
2305
2306 void
2307 for_each_eh_label (void (*callback) (rtx))
2308 {
2309 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2310 (void *) &callback);
2311 }
2312
2313 static int
2314 for_each_eh_label_1 (void **pentry, void *data)
2315 {
2316 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2317 void (*callback) (rtx) = *(void (**) (rtx)) data;
2318
2319 (*callback) (entry->label);
2320 return 1;
2321 }
2322
2323 /* Invoke CALLBACK for every exception region in the current function. */
2324
2325 void
2326 for_each_eh_region (void (*callback) (struct eh_region *))
2327 {
2328 int i, n = cfun->eh->last_region_number;
2329 for (i = 1; i <= n; ++i)
2330 {
2331 struct eh_region *region;
2332
2333 region = VEC_index (eh_region, cfun->eh->region_array, i);
2334 if (region)
2335 (*callback) (region);
2336 }
2337 }
2338
2339 /* This section describes CFG exception edges for flow. */
2340
2341 /* For communicating between calls to reachable_next_level. */
2342 struct reachable_info
2343 {
2344 tree types_caught;
2345 tree types_allowed;
2346 void (*callback) (struct eh_region *, void *);
2347 void *callback_data;
2348 bool saw_any_handlers;
2349 };
2350
2351 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2352 base class of TYPE, is in HANDLED. */
2353
2354 static int
2355 check_handled (tree handled, tree type)
2356 {
2357 tree t;
2358
2359 /* We can check for exact matches without front-end help. */
2360 if (! lang_eh_type_covers)
2361 {
2362 for (t = handled; t ; t = TREE_CHAIN (t))
2363 if (TREE_VALUE (t) == type)
2364 return 1;
2365 }
2366 else
2367 {
2368 for (t = handled; t ; t = TREE_CHAIN (t))
2369 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2370 return 1;
2371 }
2372
2373 return 0;
2374 }
2375
2376 /* A subroutine of reachable_next_level. If we are collecting a list
2377 of handlers, add one. After landing pad generation, reference
2378 it instead of the handlers themselves. Further, the handlers are
2379 all wired together, so by referencing one, we've got them all.
2380 Before landing pad generation we reference each handler individually.
2381
2382 LP_REGION contains the landing pad; REGION is the handler. */
2383
2384 static void
2385 add_reachable_handler (struct reachable_info *info,
2386 struct eh_region *lp_region, struct eh_region *region)
2387 {
2388 if (! info)
2389 return;
2390
2391 info->saw_any_handlers = true;
2392
2393 if (cfun->eh->built_landing_pads)
2394 info->callback (lp_region, info->callback_data);
2395 else
2396 info->callback (region, info->callback_data);
2397 }
2398
2399 /* Process one level of exception regions for reachability.
2400 If TYPE_THROWN is non-null, then it is the *exact* type being
2401 propagated. If INFO is non-null, then collect handler labels
2402 and caught/allowed type information between invocations. */
2403
2404 static enum reachable_code
2405 reachable_next_level (struct eh_region *region, tree type_thrown,
2406 struct reachable_info *info)
2407 {
2408 switch (region->type)
2409 {
2410 case ERT_CLEANUP:
2411 /* Before landing-pad generation, we model control flow
2412 directly to the individual handlers. In this way we can
2413 see that catch handler types may shadow one another. */
2414 add_reachable_handler (info, region, region);
2415 return RNL_MAYBE_CAUGHT;
2416
2417 case ERT_TRY:
2418 {
2419 struct eh_region *c;
2420 enum reachable_code ret = RNL_NOT_CAUGHT;
2421
2422 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2423 {
2424 /* A catch-all handler ends the search. */
2425 if (c->u.catch.type_list == NULL)
2426 {
2427 add_reachable_handler (info, region, c);
2428 return RNL_CAUGHT;
2429 }
2430
2431 if (type_thrown)
2432 {
2433 /* If we have at least one type match, end the search. */
2434 tree tp_node = c->u.catch.type_list;
2435
2436 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2437 {
2438 tree type = TREE_VALUE (tp_node);
2439
2440 if (type == type_thrown
2441 || (lang_eh_type_covers
2442 && (*lang_eh_type_covers) (type, type_thrown)))
2443 {
2444 add_reachable_handler (info, region, c);
2445 return RNL_CAUGHT;
2446 }
2447 }
2448
2449 /* If we have definitive information of a match failure,
2450 the catch won't trigger. */
2451 if (lang_eh_type_covers)
2452 return RNL_NOT_CAUGHT;
2453 }
2454
2455 /* At this point, we either don't know what type is thrown or
2456 don't have front-end assistance to help deciding if it is
2457 covered by one of the types in the list for this region.
2458
2459 We'd then like to add this region to the list of reachable
2460 handlers since it is indeed potentially reachable based on the
2461 information we have.
2462
2463 Actually, this handler is for sure not reachable if all the
2464 types it matches have already been caught. That is, it is only
2465 potentially reachable if at least one of the types it catches
2466 has not been previously caught. */
2467
2468 if (! info)
2469 ret = RNL_MAYBE_CAUGHT;
2470 else
2471 {
2472 tree tp_node = c->u.catch.type_list;
2473 bool maybe_reachable = false;
2474
2475 /* Compute the potential reachability of this handler and
2476 update the list of types caught at the same time. */
2477 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2478 {
2479 tree type = TREE_VALUE (tp_node);
2480
2481 if (! check_handled (info->types_caught, type))
2482 {
2483 info->types_caught
2484 = tree_cons (NULL, type, info->types_caught);
2485
2486 maybe_reachable = true;
2487 }
2488 }
2489
2490 if (maybe_reachable)
2491 {
2492 add_reachable_handler (info, region, c);
2493
2494 /* ??? If the catch type is a base class of every allowed
2495 type, then we know we can stop the search. */
2496 ret = RNL_MAYBE_CAUGHT;
2497 }
2498 }
2499 }
2500
2501 return ret;
2502 }
2503
2504 case ERT_ALLOWED_EXCEPTIONS:
2505 /* An empty list of types definitely ends the search. */
2506 if (region->u.allowed.type_list == NULL_TREE)
2507 {
2508 add_reachable_handler (info, region, region);
2509 return RNL_CAUGHT;
2510 }
2511
2512 /* Collect a list of lists of allowed types for use in detecting
2513 when a catch may be transformed into a catch-all. */
2514 if (info)
2515 info->types_allowed = tree_cons (NULL_TREE,
2516 region->u.allowed.type_list,
2517 info->types_allowed);
2518
2519 /* If we have definitive information about the type hierarchy,
2520 then we can tell if the thrown type will pass through the
2521 filter. */
2522 if (type_thrown && lang_eh_type_covers)
2523 {
2524 if (check_handled (region->u.allowed.type_list, type_thrown))
2525 return RNL_NOT_CAUGHT;
2526 else
2527 {
2528 add_reachable_handler (info, region, region);
2529 return RNL_CAUGHT;
2530 }
2531 }
2532
2533 add_reachable_handler (info, region, region);
2534 return RNL_MAYBE_CAUGHT;
2535
2536 case ERT_CATCH:
2537 /* Catch regions are handled by their controlling try region. */
2538 return RNL_NOT_CAUGHT;
2539
2540 case ERT_MUST_NOT_THROW:
2541 /* Here we end our search, since no exceptions may propagate.
2542      If we've already touched down at some landing pad, then the
2543      explicit function call we generated may be used.  Otherwise
2544 the call is made by the runtime.
2545
2546 Before inlining, do not perform this optimization. We may
2547 inline a subroutine that contains handlers, and that will
2548 change the value of saw_any_handlers. */
2549
2550 if ((info && info->saw_any_handlers) || !cfun->after_inlining)
2551 {
2552 add_reachable_handler (info, region, region);
2553 return RNL_CAUGHT;
2554 }
2555 else
2556 return RNL_BLOCKED;
2557
2558 case ERT_THROW:
2559 case ERT_UNKNOWN:
2560 /* Shouldn't see these here. */
2561 gcc_unreachable ();
2562 break;
2563 default:
2564 gcc_unreachable ();
2565 }
2566 }
2567
2568 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2569
2570 void
2571 foreach_reachable_handler (int region_number, bool is_resx,
2572 void (*callback) (struct eh_region *, void *),
2573 void *callback_data)
2574 {
2575 struct reachable_info info;
2576 struct eh_region *region;
2577 tree type_thrown;
2578
2579 memset (&info, 0, sizeof (info));
2580 info.callback = callback;
2581 info.callback_data = callback_data;
2582
2583 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2584
2585 type_thrown = NULL_TREE;
2586 if (is_resx)
2587 {
2588 /* A RESX leaves a region instead of entering it. Thus the
2589 region itself may have been deleted out from under us. */
2590 if (region == NULL)
2591 return;
2592 region = region->outer;
2593 }
2594 else if (region->type == ERT_THROW)
2595 {
2596 type_thrown = region->u.throw.type;
2597 region = region->outer;
2598 }
2599
2600 while (region)
2601 {
2602 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2603 break;
2604 /* If we have processed one cleanup, there is no point in
2605 processing any more of them. Each cleanup will have an edge
2606 to the next outer cleanup region, so the flow graph will be
2607 accurate. */
2608 if (region->type == ERT_CLEANUP)
2609 region = region->u.cleanup.prev_try;
2610 else
2611 region = region->outer;
2612 }
2613 }
2614
2615 /* Retrieve a list of labels of exception handlers which can be
2616 reached by a given insn. */
2617
2618 static void
2619 arh_to_landing_pad (struct eh_region *region, void *data)
2620 {
2621 rtx *p_handlers = data;
2622 if (! *p_handlers)
2623 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2624 }
2625
2626 static void
2627 arh_to_label (struct eh_region *region, void *data)
2628 {
2629 rtx *p_handlers = data;
2630 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2631 }
2632
2633 rtx
2634 reachable_handlers (rtx insn)
2635 {
2636 bool is_resx = false;
2637 rtx handlers = NULL;
2638 int region_number;
2639
2640 if (JUMP_P (insn)
2641 && GET_CODE (PATTERN (insn)) == RESX)
2642 {
2643 region_number = XINT (PATTERN (insn), 0);
2644 is_resx = true;
2645 }
2646 else
2647 {
2648 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2649 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2650 return NULL;
2651 region_number = INTVAL (XEXP (note, 0));
2652 }
2653
2654 foreach_reachable_handler (region_number, is_resx,
2655 (cfun->eh->built_landing_pads
2656 ? arh_to_landing_pad
2657 : arh_to_label),
2658 &handlers);
2659
2660 return handlers;
2661 }
2662
2663 /* Determine if the given INSN can throw an exception that is caught
2664 within the function. */
2665
2666 bool
2667 can_throw_internal_1 (int region_number, bool is_resx)
2668 {
2669 struct eh_region *region;
2670 tree type_thrown;
2671
2672 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2673
2674 type_thrown = NULL_TREE;
2675 if (is_resx)
2676 region = region->outer;
2677 else if (region->type == ERT_THROW)
2678 {
2679 type_thrown = region->u.throw.type;
2680 region = region->outer;
2681 }
2682
2683 /* If this exception is ignored by each and every containing region,
2684 then control passes straight out. The runtime may handle some
2685 regions, which also do not require processing internally. */
2686 for (; region; region = region->outer)
2687 {
2688 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2689 if (how == RNL_BLOCKED)
2690 return false;
2691 if (how != RNL_NOT_CAUGHT)
2692 return true;
2693 }
2694
2695 return false;
2696 }
2697
2698 bool
2699 can_throw_internal (rtx insn)
2700 {
2701 rtx note;
2702
2703 if (! INSN_P (insn))
2704 return false;
2705
2706 if (JUMP_P (insn)
2707 && GET_CODE (PATTERN (insn)) == RESX
2708 && XINT (PATTERN (insn), 0) > 0)
2709 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
2710
2711 if (NONJUMP_INSN_P (insn)
2712 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2713 insn = XVECEXP (PATTERN (insn), 0, 0);
2714
2715 /* Every insn that might throw has an EH_REGION note. */
2716 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2717 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2718 return false;
2719
2720 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
2721 }
2722
2723 /* Determine if the given INSN can throw an exception that is
2724 visible outside the function. */
2725
2726 bool
2727 can_throw_external_1 (int region_number, bool is_resx)
2728 {
2729 struct eh_region *region;
2730 tree type_thrown;
2731
2732 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2733
2734 type_thrown = NULL_TREE;
2735 if (is_resx)
2736 region = region->outer;
2737 else if (region->type == ERT_THROW)
2738 {
2739 type_thrown = region->u.throw.type;
2740 region = region->outer;
2741 }
2742
2743 /* If the exception is caught or blocked by any containing region,
2744 then it is not seen by any calling function. */
2745 for (; region ; region = region->outer)
2746 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2747 return false;
2748
2749 return true;
2750 }
2751
2752 bool
2753 can_throw_external (rtx insn)
2754 {
2755 rtx note;
2756
2757 if (! INSN_P (insn))
2758 return false;
2759
2760 if (JUMP_P (insn)
2761 && GET_CODE (PATTERN (insn)) == RESX
2762 && XINT (PATTERN (insn), 0) > 0)
2763 return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
2764
2765 if (NONJUMP_INSN_P (insn)
2766 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2767 insn = XVECEXP (PATTERN (insn), 0, 0);
2768
2769 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2770 if (!note)
2771 {
2772 /* Calls (and trapping insns) without notes are outside any
2773 exception handling region in this function. We have to
2774 assume it might throw. Given that the front end and middle
2775 ends mark known NOTHROW functions, this isn't so wildly
2776 inaccurate. */
2777 return (CALL_P (insn)
2778 || (flag_non_call_exceptions
2779 && may_trap_p (PATTERN (insn))));
2780 }
2781 if (INTVAL (XEXP (note, 0)) <= 0)
2782 return false;
2783
2784 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
2785 }
2786
2787 /* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
2788
2789 unsigned int
2790 set_nothrow_function_flags (void)
2791 {
2792 rtx insn;
2793
2794 /* If we don't know that this implementation of the function will
2795 actually be used, then we must not set TREE_NOTHROW, since
2796 callers must not assume that this function does not throw. */
2797 if (DECL_REPLACEABLE_P (current_function_decl))
2798 return 0;
2799
2800 TREE_NOTHROW (current_function_decl) = 1;
2801
2802 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2803 something that can throw an exception. We specifically exempt
2804 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2805 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2806 is optimistic. */
2807
2808 cfun->all_throwers_are_sibcalls = 1;
2809
2810 if (! flag_exceptions)
2811 return 0;
2812
2813 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2814 if (can_throw_external (insn))
2815 {
2816 TREE_NOTHROW (current_function_decl) = 0;
2817
2818 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2819 {
2820 cfun->all_throwers_are_sibcalls = 0;
2821 return 0;
2822 }
2823 }
2824
2825 for (insn = current_function_epilogue_delay_list; insn;
2826 insn = XEXP (insn, 1))
2827 if (can_throw_external (insn))
2828 {
2829 TREE_NOTHROW (current_function_decl) = 0;
2830
2831 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2832 {
2833 cfun->all_throwers_are_sibcalls = 0;
2834 return 0;
2835 }
2836 }
2837 return 0;
2838 }
2839
2840 struct tree_opt_pass pass_set_nothrow_function_flags =
2841 {
2842 NULL, /* name */
2843 NULL, /* gate */
2844 set_nothrow_function_flags, /* execute */
2845 NULL, /* sub */
2846 NULL, /* next */
2847 0, /* static_pass_number */
2848 0, /* tv_id */
2849 0, /* properties_required */
2850 0, /* properties_provided */
2851 0, /* properties_destroyed */
2852 0, /* todo_flags_start */
2853 0, /* todo_flags_finish */
2854 0 /* letter */
2855 };
2856
2857
2858 /* Various hooks for unwind library. */
2859
2860 /* Do any necessary initialization to access arbitrary stack frames.
2861 On the SPARC, this means flushing the register windows. */
2862
2863 void
2864 expand_builtin_unwind_init (void)
2865 {
2866 /* Set this so all the registers get saved in our frame; we need to be
2867 able to copy the saved values for any registers from frames we unwind. */
2868 current_function_has_nonlocal_label = 1;
2869
2870 #ifdef SETUP_FRAME_ADDRESSES
2871 SETUP_FRAME_ADDRESSES ();
2872 #endif
2873 }
2874
2875 rtx
2876 expand_builtin_eh_return_data_regno (tree arglist)
2877 {
2878 tree which = TREE_VALUE (arglist);
2879 unsigned HOST_WIDE_INT iwhich;
2880
2881 if (TREE_CODE (which) != INTEGER_CST)
2882 {
2883 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2884 return constm1_rtx;
2885 }
2886
2887 iwhich = tree_low_cst (which, 1);
2888 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2889 if (iwhich == INVALID_REGNUM)
2890 return constm1_rtx;
2891
2892 #ifdef DWARF_FRAME_REGNUM
2893 iwhich = DWARF_FRAME_REGNUM (iwhich);
2894 #else
2895 iwhich = DBX_REGISTER_NUMBER (iwhich);
2896 #endif
2897
2898 return GEN_INT (iwhich);
2899 }
2900
2901 /* Given a value extracted from the return address register or stack slot,
2902 return the actual address encoded in that value. */
2903
2904 rtx
2905 expand_builtin_extract_return_addr (tree addr_tree)
2906 {
2907 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2908
2909 if (GET_MODE (addr) != Pmode
2910 && GET_MODE (addr) != VOIDmode)
2911 {
2912 #ifdef POINTERS_EXTEND_UNSIGNED
2913 addr = convert_memory_address (Pmode, addr);
2914 #else
2915 addr = convert_to_mode (Pmode, addr, 0);
2916 #endif
2917 }
2918
2919 /* First mask out any unwanted bits. */
2920 #ifdef MASK_RETURN_ADDR
2921 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2922 #endif
2923
2924 /* Then adjust to find the real return address. */
2925 #if defined (RETURN_ADDR_OFFSET)
2926 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2927 #endif
2928
2929 return addr;
2930 }
2931
2932 /* Given an actual address in addr_tree, do any necessary encoding
2933 and return the value to be stored in the return address register or
2934 stack slot so the epilogue will return to that address. */
2935
2936 rtx
2937 expand_builtin_frob_return_addr (tree addr_tree)
2938 {
2939 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2940
2941 addr = convert_memory_address (Pmode, addr);
2942
2943 #ifdef RETURN_ADDR_OFFSET
2944 addr = force_reg (Pmode, addr);
2945 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2946 #endif
2947
2948 return addr;
2949 }
2950
2951 /* Set up the epilogue with the magic bits we'll need to return to the
2952 exception handler. */
2953
2954 void
2955 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2956 tree handler_tree)
2957 {
2958 rtx tmp;
2959
2960 #ifdef EH_RETURN_STACKADJ_RTX
2961 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2962 tmp = convert_memory_address (Pmode, tmp);
2963 if (!cfun->eh->ehr_stackadj)
2964 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
2965 else if (tmp != cfun->eh->ehr_stackadj)
2966 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
2967 #endif
2968
2969 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2970 tmp = convert_memory_address (Pmode, tmp);
2971 if (!cfun->eh->ehr_handler)
2972 cfun->eh->ehr_handler = copy_to_reg (tmp);
2973 else if (tmp != cfun->eh->ehr_handler)
2974 emit_move_insn (cfun->eh->ehr_handler, tmp);
2975
2976 if (!cfun->eh->ehr_label)
2977 cfun->eh->ehr_label = gen_label_rtx ();
2978 emit_jump (cfun->eh->ehr_label);
2979 }
2980
2981 void
2982 expand_eh_return (void)
2983 {
2984 rtx around_label;
2985
2986 if (! cfun->eh->ehr_label)
2987 return;
2988
2989 current_function_calls_eh_return = 1;
2990
2991 #ifdef EH_RETURN_STACKADJ_RTX
2992 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2993 #endif
2994
2995 around_label = gen_label_rtx ();
2996 emit_jump (around_label);
2997
2998 emit_label (cfun->eh->ehr_label);
2999 clobber_return_register ();
3000
3001 #ifdef EH_RETURN_STACKADJ_RTX
3002 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3003 #endif
3004
3005 #ifdef HAVE_eh_return
3006 if (HAVE_eh_return)
3007 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3008 else
3009 #endif
3010 {
3011 #ifdef EH_RETURN_HANDLER_RTX
3012 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3013 #else
3014 error ("__builtin_eh_return not supported on this target");
3015 #endif
3016 }
3017
3018 emit_label (around_label);
3019 }
3020
3021 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3022 POINTERS_EXTEND_UNSIGNED and return it. */
3023
3024 rtx
3025 expand_builtin_extend_pointer (tree addr_tree)
3026 {
3027 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3028 int extend;
3029
3030 #ifdef POINTERS_EXTEND_UNSIGNED
3031 extend = POINTERS_EXTEND_UNSIGNED;
3032 #else
3033 /* The previous EH code did an unsigned extend by default, so we do this also
3034 for consistency. */
3035 extend = 1;
3036 #endif
3037
3038 return convert_modes (word_mode, ptr_mode, addr, extend);
3039 }
3040
3041 /* In the following functions, we represent entries in the action table
3042 as 1-based indices. Special cases are:
3043
3044 0: null action record, non-null landing pad; implies cleanups
3045 -1: null action record, null landing pad; implies no action
3046 -2: no call-site entry; implies must_not_throw
3047 -3: we have yet to process outer regions
3048
3049 Further, no special cases apply to the "next" field of the record.
3050 For next, 0 means end of list. */
3051
3052 struct action_record
3053 {
3054 int offset;
3055 int filter;
3056 int next;
3057 };
3058
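/* Hash and equality functions used to share action records that have
   identical filter and next fields.  */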
3059 static int
3060 action_record_eq (const void *pentry, const void *pdata)
3061 {
3062 const struct action_record *entry = (const struct action_record *) pentry;
3063 const struct action_record *data = (const struct action_record *) pdata;
3064 return entry->filter == data->filter && entry->next == data->next;
3065 }
3066
3067 static hashval_t
3068 action_record_hash (const void *pentry)
3069 {
3070 const struct action_record *entry = (const struct action_record *) pentry;
3071 return entry->next * 1009 + entry->filter;
3072 }
3073
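/* Return the 1-based offset of an action record with the given FILTER and
   NEXT link, creating a new record and emitting it into
   action_record_data if an identical one does not already exist.  */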
3074 static int
3075 add_action_record (htab_t ar_hash, int filter, int next)
3076 {
3077 struct action_record **slot, *new, tmp;
3078
3079 tmp.filter = filter;
3080 tmp.next = next;
3081 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3082
3083 if ((new = *slot) == NULL)
3084 {
3085 new = xmalloc (sizeof (*new));
3086 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3087 new->filter = filter;
3088 new->next = next;
3089 *slot = new;
3090
3091 /* The filter value goes in untouched. The link to the next
3092 record is a "self-relative" byte offset, or zero to indicate
3093	 that there is no next record.  So convert the absolute 1-based
3094 indices we've been carrying around into a displacement. */
3095
3096 push_sleb128 (&cfun->eh->action_record_data, filter);
3097 if (next)
3098 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3099 push_sleb128 (&cfun->eh->action_record_data, next);
3100 }
3101
3102 return new->offset;
3103 }
3104
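/* Walk outward from REGION collecting the chain of action records that
   applies to a throw from within it.  Return the index of the first
   record, or one of the special values described above (0, -1, -2).  */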
3105 static int
3106 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3107 {
3108 struct eh_region *c;
3109 int next;
3110
3111 /* If we've reached the top of the region chain, then we have
3112 no actions, and require no landing pad. */
3113 if (region == NULL)
3114 return -1;
3115
3116 switch (region->type)
3117 {
3118 case ERT_CLEANUP:
3119 /* A cleanup adds a zero filter to the beginning of the chain, but
3120 there are special cases to look out for. If there are *only*
3121 cleanups along a path, then it compresses to a zero action.
3122 Further, if there are multiple cleanups along a path, we only
3123 need to represent one of them, as that is enough to trigger
3124 entry to the landing pad at runtime. */
3125 next = collect_one_action_chain (ar_hash, region->outer);
3126 if (next <= 0)
3127 return 0;
3128 for (c = region->outer; c ; c = c->outer)
3129 if (c->type == ERT_CLEANUP)
3130 return next;
3131 return add_action_record (ar_hash, 0, next);
3132
3133 case ERT_TRY:
3134 /* Process the associated catch regions in reverse order.
3135 If there's a catch-all handler, then we don't need to
3136 search outer regions. Use a magic -3 value to record
3137 that we haven't done the outer search. */
3138 next = -3;
3139 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3140 {
3141 if (c->u.catch.type_list == NULL)
3142 {
3143 /* Retrieve the filter from the head of the filter list
3144 where we have stored it (see assign_filter_values). */
3145 int filter
3146 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3147
3148 next = add_action_record (ar_hash, filter, 0);
3149 }
3150 else
3151 {
3152 /* Once the outer search is done, trigger an action record for
3153 each filter we have. */
3154 tree flt_node;
3155
3156 if (next == -3)
3157 {
3158 next = collect_one_action_chain (ar_hash, region->outer);
3159
3160 /* If there is no next action, terminate the chain. */
3161 if (next == -1)
3162 next = 0;
3163 /* If all outer actions are cleanups or must_not_throw,
3164 we'll have no action record for it, since we had wanted
3165 to encode these states in the call-site record directly.
3166 Add a cleanup action to the chain to catch these. */
3167 else if (next <= 0)
3168 next = add_action_record (ar_hash, 0, 0);
3169 }
3170
3171 flt_node = c->u.catch.filter_list;
3172 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3173 {
3174 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3175 next = add_action_record (ar_hash, filter, next);
3176 }
3177 }
3178 }
3179 return next;
3180
3181 case ERT_ALLOWED_EXCEPTIONS:
3182 /* An exception specification adds its filter to the
3183 beginning of the chain. */
3184 next = collect_one_action_chain (ar_hash, region->outer);
3185
3186 /* If there is no next action, terminate the chain. */
3187 if (next == -1)
3188 next = 0;
3189 /* If all outer actions are cleanups or must_not_throw,
3190 we'll have no action record for it, since we had wanted
3191 to encode these states in the call-site record directly.
3192 Add a cleanup action to the chain to catch these. */
3193 else if (next <= 0)
3194 next = add_action_record (ar_hash, 0, 0);
3195
3196 return add_action_record (ar_hash, region->u.allowed.filter, next);
3197
3198 case ERT_MUST_NOT_THROW:
3199 /* A must-not-throw region with no inner handlers or cleanups
3200 requires no call-site entry. Note that this differs from
3201 the no handler or cleanup case in that we do require an lsda
3202 to be generated. Return a magic -2 value to record this. */
3203 return -2;
3204
3205 case ERT_CATCH:
3206 case ERT_THROW:
3207 /* CATCH regions are handled in TRY above. THROW regions are
3208 for optimization information only and produce no output. */
3209 return collect_one_action_chain (ar_hash, region->outer);
3210
3211 default:
3212 gcc_unreachable ();
3213 }
3214 }
3215
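/* Append an entry for LANDING_PAD and ACTION to the call-site table,
   growing it as needed, and return the entry's index biased by
   call_site_base.  */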
3216 static int
3217 add_call_site (rtx landing_pad, int action)
3218 {
3219 struct call_site_record *data = cfun->eh->call_site_data;
3220 int used = cfun->eh->call_site_data_used;
3221 int size = cfun->eh->call_site_data_size;
3222
3223 if (used >= size)
3224 {
3225 size = (size ? size * 2 : 64);
3226 data = ggc_realloc (data, sizeof (*data) * size);
3227 cfun->eh->call_site_data = data;
3228 cfun->eh->call_site_data_size = size;
3229 }
3230
3231 data[used].landing_pad = landing_pad;
3232 data[used].action = action;
3233
3234 cfun->eh->call_site_data_used = used + 1;
3235
3236 return used + call_site_base;
3237 }
3238
3239 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3240 The new note numbers will not refer to region numbers, but
3241 instead to call site entries. */
3242
3243 unsigned int
3244 convert_to_eh_region_ranges (void)
3245 {
3246 rtx insn, iter, note;
3247 htab_t ar_hash;
3248 int last_action = -3;
3249 rtx last_action_insn = NULL_RTX;
3250 rtx last_landing_pad = NULL_RTX;
3251 rtx first_no_action_insn = NULL_RTX;
3252 int call_site = 0;
3253
3254 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3255 return 0;
3256
3257 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3258
3259 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3260
3261 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3262 if (INSN_P (iter))
3263 {
3264 struct eh_region *region;
3265 int this_action;
3266 rtx this_landing_pad;
3267
3268 insn = iter;
3269 if (NONJUMP_INSN_P (insn)
3270 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3271 insn = XVECEXP (PATTERN (insn), 0, 0);
3272
3273 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3274 if (!note)
3275 {
3276 if (! (CALL_P (insn)
3277 || (flag_non_call_exceptions
3278 && may_trap_p (PATTERN (insn)))))
3279 continue;
3280 this_action = -1;
3281 region = NULL;
3282 }
3283 else
3284 {
3285 if (INTVAL (XEXP (note, 0)) <= 0)
3286 continue;
3287 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3288 this_action = collect_one_action_chain (ar_hash, region);
3289 }
3290
3291 /* Existence of catch handlers, or must-not-throw regions
3292 implies that an lsda is needed (even if empty). */
3293 if (this_action != -1)
3294 cfun->uses_eh_lsda = 1;
3295
3296 /* Delay creation of region notes for no-action regions
3297 until we're sure that an lsda will be required. */
3298 else if (last_action == -3)
3299 {
3300 first_no_action_insn = iter;
3301 last_action = -1;
3302 }
3303
3304 /* Cleanups and handlers may share action chains but not
3305 landing pads. Collect the landing pad for this region. */
3306 if (this_action >= 0)
3307 {
3308 struct eh_region *o;
3309 for (o = region; ! o->landing_pad ; o = o->outer)
3310 continue;
3311 this_landing_pad = o->landing_pad;
3312 }
3313 else
3314 this_landing_pad = NULL_RTX;
3315
3316 /* Differing actions or landing pads implies a change in call-site
3317 info, which implies some EH_REGION note should be emitted. */
3318 if (last_action != this_action
3319 || last_landing_pad != this_landing_pad)
3320 {
3321 /* If we'd not seen a previous action (-3) or the previous
3322 action was must-not-throw (-2), then we do not need an
3323 end note. */
3324 if (last_action >= -1)
3325 {
3326 /* If we delayed the creation of the begin, do it now. */
3327 if (first_no_action_insn)
3328 {
3329 call_site = add_call_site (NULL_RTX, 0);
3330 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3331 first_no_action_insn);
3332 NOTE_EH_HANDLER (note) = call_site;
3333 first_no_action_insn = NULL_RTX;
3334 }
3335
3336 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3337 last_action_insn);
3338 NOTE_EH_HANDLER (note) = call_site;
3339 }
3340
3341 /* If the new action is must-not-throw, then no region notes
3342 are created. */
3343 if (this_action >= -1)
3344 {
3345 call_site = add_call_site (this_landing_pad,
3346 this_action < 0 ? 0 : this_action);
3347 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3348 NOTE_EH_HANDLER (note) = call_site;
3349 }
3350
3351 last_action = this_action;
3352 last_landing_pad = this_landing_pad;
3353 }
3354 last_action_insn = iter;
3355 }
3356
3357 if (last_action >= -1 && ! first_no_action_insn)
3358 {
3359 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3360 NOTE_EH_HANDLER (note) = call_site;
3361 }
3362
3363 htab_delete (ar_hash);
3364 return 0;
3365 }
3366
3367 struct tree_opt_pass pass_convert_to_eh_region_ranges =
3368 {
3369 "eh-ranges", /* name */
3370 NULL, /* gate */
3371 convert_to_eh_region_ranges, /* execute */
3372 NULL, /* sub */
3373 NULL, /* next */
3374 0, /* static_pass_number */
3375 0, /* tv_id */
3376 0, /* properties_required */
3377 0, /* properties_provided */
3378 0, /* properties_destroyed */
3379 0, /* todo_flags_start */
3380 TODO_dump_func, /* todo_flags_finish */
3381 0 /* letter */
3382 };
3383
3384
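/* Append VALUE to *DATA_AREA as an unsigned LEB128: seven bits per byte,
   least significant group first, with the high bit set on every byte
   except the last.  For example, 624485 encodes as the bytes
   0xE5 0x8E 0x26.  */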
3385 static void
3386 push_uleb128 (varray_type *data_area, unsigned int value)
3387 {
3388 do
3389 {
3390 unsigned char byte = value & 0x7f;
3391 value >>= 7;
3392 if (value)
3393 byte |= 0x80;
3394 VARRAY_PUSH_UCHAR (*data_area, byte);
3395 }
3396 while (value);
3397 }
3398
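/* Likewise for a signed LEB128 value; the final byte carries the sign in
   bit 6.  For example, -123456 encodes as the bytes 0xC0 0xBB 0x78
   (assuming arithmetic right shift of negative values, which the loop
   below relies on).  */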
3399 static void
3400 push_sleb128 (varray_type *data_area, int value)
3401 {
3402 unsigned char byte;
3403 int more;
3404
3405 do
3406 {
3407 byte = value & 0x7f;
3408 value >>= 7;
3409 more = ! ((value == 0 && (byte & 0x40) == 0)
3410 || (value == -1 && (byte & 0x40) != 0));
3411 if (more)
3412 byte |= 0x80;
3413 VARRAY_PUSH_UCHAR (*data_area, byte);
3414 }
3415 while (more);
3416 }
3417
3418
3419 #ifndef HAVE_AS_LEB128
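/* Without assembler LEB128 support we must compute the size of the
   call-site table ourselves so that the table length and the @TType base
   offset can be emitted as constants.  Each dw2 entry is three 4-byte
   fields plus a uleb128 action; each sjlj entry is two uleb128 values.  */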
3420 static int
3421 dw2_size_of_call_site_table (void)
3422 {
3423 int n = cfun->eh->call_site_data_used;
3424 int size = n * (4 + 4 + 4);
3425 int i;
3426
3427 for (i = 0; i < n; ++i)
3428 {
3429 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3430 size += size_of_uleb128 (cs->action);
3431 }
3432
3433 return size;
3434 }
3435
3436 static int
3437 sjlj_size_of_call_site_table (void)
3438 {
3439 int n = cfun->eh->call_site_data_used;
3440 int size = 0;
3441 int i;
3442
3443 for (i = 0; i < n; ++i)
3444 {
3445 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3446 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3447 size += size_of_uleb128 (cs->action);
3448 }
3449
3450 return size;
3451 }
3452 #endif
3453
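/* Output the call-site table for dwarf2 unwinding: for each entry, the
   region start and length relative to the function begin label, the
   landing pad offset (or zero), and the uleb128 action index.  */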
3454 static void
3455 dw2_output_call_site_table (void)
3456 {
3457 int n = cfun->eh->call_site_data_used;
3458 int i;
3459
3460 for (i = 0; i < n; ++i)
3461 {
3462 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3463 char reg_start_lab[32];
3464 char reg_end_lab[32];
3465 char landing_pad_lab[32];
3466
3467 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3468 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3469
3470 if (cs->landing_pad)
3471 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3472 CODE_LABEL_NUMBER (cs->landing_pad));
3473
3474 /* ??? Perhaps use insn length scaling if the assembler supports
3475 generic arithmetic. */
3476 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3477 data4 if the function is small enough. */
3478 #ifdef HAVE_AS_LEB128
3479 dw2_asm_output_delta_uleb128 (reg_start_lab,
3480 current_function_func_begin_label,
3481 "region %d start", i);
3482 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3483 "length");
3484 if (cs->landing_pad)
3485 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3486 current_function_func_begin_label,
3487 "landing pad");
3488 else
3489 dw2_asm_output_data_uleb128 (0, "landing pad");
3490 #else
3491 dw2_asm_output_delta (4, reg_start_lab,
3492 current_function_func_begin_label,
3493 "region %d start", i);
3494 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3495 if (cs->landing_pad)
3496 dw2_asm_output_delta (4, landing_pad_lab,
3497 current_function_func_begin_label,
3498 "landing pad");
3499 else
3500 dw2_asm_output_data (4, 0, "landing pad");
3501 #endif
3502 dw2_asm_output_data_uleb128 (cs->action, "action");
3503 }
3504
3505 call_site_base += n;
3506 }
3507
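/* Output the call-site table for sjlj unwinding: just the dispatch index
   (stored as the landing pad) and the action, both as uleb128 values.  */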
3508 static void
3509 sjlj_output_call_site_table (void)
3510 {
3511 int n = cfun->eh->call_site_data_used;
3512 int i;
3513
3514 for (i = 0; i < n; ++i)
3515 {
3516 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3517
3518 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3519 "region %d landing pad", i);
3520 dw2_asm_output_data_uleb128 (cs->action, "action");
3521 }
3522
3523 call_site_base += n;
3524 }
3525
3526 #ifndef TARGET_UNWIND_INFO
3527 /* Switch to the section that should be used for exception tables. */
3528
3529 static void
3530 switch_to_exception_section (void)
3531 {
3532 if (exception_section == 0)
3533 {
3534 if (targetm.have_named_sections)
3535 {
3536 int flags;
3537
3538 if (EH_TABLES_CAN_BE_READ_ONLY)
3539 {
3540 int tt_format =
3541 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3542 flags = ((! flag_pic
3543 || ((tt_format & 0x70) != DW_EH_PE_absptr
3544 && (tt_format & 0x70) != DW_EH_PE_aligned))
3545 ? 0 : SECTION_WRITE);
3546 }
3547 else
3548 flags = SECTION_WRITE;
3549 exception_section = get_section (".gcc_except_table", flags, NULL);
3550 }
3551 else
3552 exception_section = flag_pic ? data_section : readonly_data_section;
3553 }
3554 switch_to_section (exception_section);
3555 }
3556 #endif
3557
3558
3559 /* Output a reference from an exception table to the type_info object TYPE.
3560 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3561 the value. */
3562
3563 static void
3564 output_ttype (tree type, int tt_format, int tt_format_size)
3565 {
3566 rtx value;
3567 bool public = true;
3568
3569 if (type == NULL_TREE)
3570 value = const0_rtx;
3571 else
3572 {
3573 struct cgraph_varpool_node *node;
3574
3575 type = lookup_type_for_runtime (type);
3576 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3577
3578 /* Let cgraph know that the rtti decl is used. Not all of the
3579 paths below go through assemble_integer, which would take
3580 care of this for us. */
3581 STRIP_NOPS (type);
3582 if (TREE_CODE (type) == ADDR_EXPR)
3583 {
3584 type = TREE_OPERAND (type, 0);
3585 if (TREE_CODE (type) == VAR_DECL)
3586 {
3587 node = cgraph_varpool_node (type);
3588 if (node)
3589 cgraph_varpool_mark_needed_node (node);
3590 public = TREE_PUBLIC (type);
3591 }
3592 }
3593 else
3594 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3595 }
3596
3597 /* Allow the target to override the type table entry format. */
3598 if (targetm.asm_out.ttype (value))
3599 return;
3600
3601 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3602 assemble_integer (value, tt_format_size,
3603 tt_format_size * BITS_PER_UNIT, 1);
3604 else
3605 dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL);
3606 }
3607
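/* Output the LSDA (language-specific data area) for the current function:
   the header, the call-site table, the action record table, and the
   @TType (type_info) table.  */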
3608 void
3609 output_function_exception_table (void)
3610 {
3611 int tt_format, cs_format, lp_format, i, n;
3612 #ifdef HAVE_AS_LEB128
3613 char ttype_label[32];
3614 char cs_after_size_label[32];
3615 char cs_end_label[32];
3616 #else
3617 int call_site_len;
3618 #endif
3619 int have_tt_data;
3620 int tt_format_size = 0;
3621
3622 if (eh_personality_libfunc)
3623 assemble_external_libcall (eh_personality_libfunc);
3624
3625 /* Not all functions need anything. */
3626 if (! cfun->uses_eh_lsda)
3627 return;
3628
3629 #ifdef TARGET_UNWIND_INFO
3630 /* TODO: Move this into target file. */
3631 fputs ("\t.personality\t", asm_out_file);
3632 output_addr_const (asm_out_file, eh_personality_libfunc);
3633 fputs ("\n\t.handlerdata\n", asm_out_file);
3634 /* Note that varasm still thinks we're in the function's code section.
3635 The ".endp" directive that will immediately follow will take us back. */
3636 #else
3637 switch_to_exception_section ();
3638 #endif
3639
3640 /* If the target wants a label to begin the table, emit it here. */
3641 targetm.asm_out.except_table_label (asm_out_file);
3642
3643 have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
3644 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3645
3646 /* Indicate the format of the @TType entries. */
3647 if (! have_tt_data)
3648 tt_format = DW_EH_PE_omit;
3649 else
3650 {
3651 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3652 #ifdef HAVE_AS_LEB128
3653 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3654 current_function_funcdef_no);
3655 #endif
3656 tt_format_size = size_of_encoded_value (tt_format);
3657
3658 assemble_align (tt_format_size * BITS_PER_UNIT);
3659 }
3660
3661 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3662 current_function_funcdef_no);
3663
3664 /* The LSDA header. */
3665
3666 /* Indicate the format of the landing pad start pointer. An omitted
3667 field implies @LPStart == @Start. */
3668 /* Currently we always put @LPStart == @Start. This field would
3669 be most useful in moving the landing pads completely out of
3670 line to another section, but it could also be used to minimize
3671 the size of uleb128 landing pad offsets. */
3672 lp_format = DW_EH_PE_omit;
3673 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3674 eh_data_format_name (lp_format));
3675
3676 /* @LPStart pointer would go here. */
3677
3678 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3679 eh_data_format_name (tt_format));
3680
3681 #ifndef HAVE_AS_LEB128
3682 if (USING_SJLJ_EXCEPTIONS)
3683 call_site_len = sjlj_size_of_call_site_table ();
3684 else
3685 call_site_len = dw2_size_of_call_site_table ();
3686 #endif
3687
3688 /* A pc-relative 4-byte displacement to the @TType data. */
3689 if (have_tt_data)
3690 {
3691 #ifdef HAVE_AS_LEB128
3692 char ttype_after_disp_label[32];
3693 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3694 current_function_funcdef_no);
3695 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3696 "@TType base offset");
3697 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3698 #else
3699 /* Ug. Alignment queers things. */
3700 unsigned int before_disp, after_disp, last_disp, disp;
3701
3702 before_disp = 1 + 1;
3703 after_disp = (1 + size_of_uleb128 (call_site_len)
3704 + call_site_len
3705 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3706 + (VEC_length (tree, cfun->eh->ttype_data)
3707 * tt_format_size));
3708
3709 disp = after_disp;
3710 do
3711 {
3712 unsigned int disp_size, pad;
3713
3714 last_disp = disp;
3715 disp_size = size_of_uleb128 (disp);
3716 pad = before_disp + disp_size + after_disp;
3717 if (pad % tt_format_size)
3718 pad = tt_format_size - (pad % tt_format_size);
3719 else
3720 pad = 0;
3721 disp = after_disp + pad;
3722 }
3723 while (disp != last_disp);
3724
3725 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3726 #endif
3727 }
3728
3729 /* Indicate the format of the call-site offsets. */
3730 #ifdef HAVE_AS_LEB128
3731 cs_format = DW_EH_PE_uleb128;
3732 #else
3733 cs_format = DW_EH_PE_udata4;
3734 #endif
3735 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3736 eh_data_format_name (cs_format));
3737
3738 #ifdef HAVE_AS_LEB128
3739 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3740 current_function_funcdef_no);
3741 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3742 current_function_funcdef_no);
3743 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3744 "Call-site table length");
3745 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3746 if (USING_SJLJ_EXCEPTIONS)
3747 sjlj_output_call_site_table ();
3748 else
3749 dw2_output_call_site_table ();
3750 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3751 #else
3752 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3753 if (USING_SJLJ_EXCEPTIONS)
3754 sjlj_output_call_site_table ();
3755 else
3756 dw2_output_call_site_table ();
3757 #endif
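/* Summary of what was just emitted (as built by this file, not a
restatement of the unwinder specification): for the DWARF2 unwinder
each call-site record gives a region start relative to @Start, the
region length, a landing pad offset (zero when the region has no
landing pad), and a uleb128 action value that is zero for "no action"
or one plus the offset of the first action record to run.  The SjLj
variant uses its own, simpler per-call-site layout.  */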
3758
3759 /* ??? Decode and interpret the data for flag_debug_asm. */
3760 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3761 for (i = 0; i < n; ++i)
3762 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3763 (i ? NULL : "Action record table"));
3764
3765 if (have_tt_data)
3766 assemble_align (tt_format_size * BITS_PER_UNIT);
3767
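/* The type table is written back to front: filter N occupies the Nth
entry before the @TTBase label (or before the point the computed base
offset designates), and ttype_data element I corresponds to filter
I + 1 as filters are assigned elsewhere in this file, so walking the
vector in reverse places filter 1 closest to the end of the table.  */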
3768 i = VEC_length (tree, cfun->eh->ttype_data);
3769 while (i-- > 0)
3770 {
3771 tree type = VEC_index (tree, cfun->eh->ttype_data, i);
3772 output_ttype (type, tt_format, tt_format_size);
3773 }
3774
3775 #ifdef HAVE_AS_LEB128
3776 if (have_tt_data)
3777 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3778 #endif
3779
3780 /* ??? Decode and interpret the data for flag_debug_asm. */
3781 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3782 for (i = 0; i < n; ++i)
3783 {
3784 if (targetm.arm_eabi_unwinder)
3785 {
3786 tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
3787 output_ttype (type, tt_format, tt_format_size);
3788 }
3789 else
3790 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3791 (i ? NULL : "Exception specification table"));
3792 }
3793
3794 switch_to_section (current_function_section ());
3795 }
3796
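/* Record TABLE as FUN's throw-statement hash table, used elsewhere to
map statements that may throw to their EH region numbers.  */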
3797 void
3798 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3799 {
3800 fun->eh->throw_stmt_table = table;
3801 }
3802
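/* Return FUN's throw-statement hash table, as recorded above.  */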
3803 htab_t
3804 get_eh_throw_stmt_table (struct function *fun)
3805 {
3806 return fun->eh->throw_stmt_table;
3807 }
3808
3809 /* Dump EH information to OUT. */
3810 void
3811 dump_eh_tree (FILE *out, struct function *fun)
3812 {
3813 struct eh_region *i;
3814 int depth = 0;
3815 static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
3816 "allowed_exceptions", "must_not_throw",
3817 "throw"};
3818
3819 i = fun->eh->region_tree;
3820 if (! i)
3821 return;
3822
3823 fprintf (out, "Eh tree:\n");
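/* The loop below is an iterative pre-order walk of the region tree:
descend into ->inner when possible, otherwise advance to ->next_peer,
otherwise climb ->outer links until a peer is found, and stop once we
step past the root.  Each region is printed before its children,
indented two columns per nesting level.  */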
3824 while (1)
3825 {
3826 fprintf (out, " %*s %i %s", depth * 2, "",
3827 i->region_number, type_name [(int)i->type]);
3828 if (i->tree_label)
3829 {
3830 fprintf (out, " tree_label:");
3831 print_generic_expr (out, i->tree_label, 0);
3832 }
3833 fprintf (out, "\n");
3834 /* If there are sub-regions, process them. */
3835 if (i->inner)
3836 i = i->inner, depth++;
3837 /* If there are peers, process them. */
3838 else if (i->next_peer)
3839 i = i->next_peer;
3840 /* Otherwise, step back up the tree to the next peer. */
3841 else
3842 {
3843 do {
3844 i = i->outer;
3845 depth--;
3846 if (i == NULL)
3847 return;
3848 } while (i->next_peer == NULL);
3849 i = i->next_peer;
3850 }
3851 }
3852 }
3853
3854 /* Verify some basic invariants on EH data structures. Could be extended to
3855 catch more. */
3856 void
3857 verify_eh_tree (struct function *fun)
3858 {
3859 struct eh_region *i, *outer = NULL;
3860 bool err = false;
3861 int nvisited = 0;
3862 int count = 0;
3863 int j;
3864 int depth = 0;
3865
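/* Invariants checked below: every region recorded in region_array
carries its own index; the tree reachable from region_tree agrees with
the ->outer links and with region_array; no region that may contain a
throw is nested inside a region that may not; and the number of
regions reachable from the tree matches the number recorded in the
array.  */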
3866 i = fun->eh->region_tree;
3867 if (! i)
3868 return;
3869 for (j = fun->eh->last_region_number; j > 0; --j)
3870 if ((i = VEC_index (eh_region, cfun->eh->region_array, j)))
3871 {
3872 count++;
3873 if (i->region_number != j)
3874 {
3875 error ("region_array is corrupted for region %i", i->region_number);
3876 err = true;
3877 }
3878 }
3879
3880 while (1)
3881 {
3882 if (VEC_index (eh_region, cfun->eh->region_array, i->region_number) != i)
3883 {
3884 error ("region_array is corrupted for region %i", i->region_number);
3885 err = true;
3886 }
3887 if (i->outer != outer)
3888 {
3889 error ("outer block of region %i is wrong", i->region_number);
3890 err = true;
3891 }
3892 if (i->may_contain_throw && outer && !outer->may_contain_throw)
3893 {
3894 error ("region %i may contain throw and is contained in region that may not",
3895 i->region_number);
3896 err = true;
3897 }
3898 if (depth < 0)
3899 {
3900 error ("negative nesting depth of region %i", i->region_number);
3901 err = true;
3902 }
3903 nvisited ++;
3904 /* If there are sub-regions, process them. */
3905 if (i->inner)
3906 outer = i, i = i->inner, depth++;
3907 /* If there are peers, process them. */
3908 else if (i->next_peer)
3909 i = i->next_peer;
3910 /* Otherwise, step back up the tree to the next peer. */
3911 else
3912 {
3913 do {
3914 i = i->outer;
3915 depth--;
3916 if (i == NULL)
3917 {
3918 if (depth != -1)
3919 {
3920 error ("tree list ends on depth %i", depth + 1);
3921 err = true;
3922 }
3923 if (count != nvisited)
3924 {
3925 error ("array does not match the region tree");
3926 err = true;
3927 }
3928 if (err)
3929 {
3930 dump_eh_tree (stderr, fun);
3931 internal_error ("verify_eh_tree failed");
3932 }
3933 return;
3934 }
3935 outer = i->outer;
3936 } while (i->next_peer == NULL);
3937 i = i->next_peer;
3938 }
3939 }
3940 }
3941
3942 /* Initialize unwind_resume_libfunc. */
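/* _Unwind_Resume (or its SjLj counterpart) is the runtime entry point
called at the end of a cleanup to continue propagating an in-flight
exception; the choice of name depends only on the unwinding scheme,
not on the source language.  */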
3943
3944 void
3945 default_init_unwind_resume_libfunc (void)
3946 {
3947 /* The default C++ routines aren't actually C++-specific, so use those. */
3948 unwind_resume_libfunc =
3949 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
3950 : "_Unwind_Resume");
3951 }
3952
3953
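/* Gate: run the RTL exception handling pass only when exception
handling is enabled at all.  */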
3954 static bool
3955 gate_handle_eh (void)
3956 {
3957 return doing_eh (0);
3958 }
3959
3960 /* Complete generation of exception handling code. */
3961 static unsigned int
3962 rest_of_handle_eh (void)
3963 {
3964 cleanup_cfg (CLEANUP_NO_INSN_DEL);
3965 finish_eh_generation ();
3966 cleanup_cfg (CLEANUP_NO_INSN_DEL);
3967 return 0;
3968 }
3969
3970 struct tree_opt_pass pass_rtl_eh =
3971 {
3972 "eh", /* name */
3973 gate_handle_eh, /* gate */
3974 rest_of_handle_eh, /* execute */
3975 NULL, /* sub */
3976 NULL, /* next */
3977 0, /* static_pass_number */
3978 TV_JUMP, /* tv_id */
3979 0, /* properties_required */
3980 0, /* properties_provided */
3981 0, /* properties_destroyed */
3982 0, /* todo_flags_start */
3983 TODO_dump_func, /* todo_flags_finish */
3984 'h' /* letter */
3985 };
3986
3987 #include "gt-except.h"
3988