xref: /dragonfly/contrib/gcc-4.7/gcc/except.c (revision 0085a56d)
1 /* Implements exception handling.
2    Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4    Free Software Foundation, Inc.
5    Contributed by Mike Stump <mrs@cygnus.com>.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 
24 /* An exception is an event that can be "thrown" from within a
25    function.  This event can then be "caught" by the callers of
26    the function.
27 
28    The representation of exceptions changes several times during
29    the compilation process:
30 
31    In the beginning, in the front end, we have the GENERIC trees
32    TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
33    CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
34 
35    During initial gimplification (gimplify.c) these are lowered
36    to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
37    The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
38    into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
39    conversion.
40 
41    During pass_lower_eh (tree-eh.c) we record the nested structure
42    of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
43    We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
44    regions at this time.  We can then flatten the statements within
45    the TRY nodes to straight-line code.  Statements that had been within
46    TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
47    so that we may remember what action is supposed to be taken if
48    a given statement does throw.  During this lowering process,
49    we create an EH_LANDING_PAD node for each EH_REGION that has
50    some code within the function that needs to be executed if a
51    throw does happen.  We also create RESX statements that are
52    used to transfer control from an inner EH_REGION to an outer
53    EH_REGION.  We also create EH_DISPATCH statements as placeholders
54    for a runtime type comparison that should be made in order to
55    select the action to perform among different CATCH and EH_FILTER
56    regions.
57 
58    During pass_lower_eh_dispatch (tree-eh.c), which is run after
59    all inlining is complete, we are able to run assign_filter_values,
60    which allows us to map the set of types manipulated by all of the
61    CATCH and EH_FILTER regions to a set of integers.  This set of integers
62    will be how the exception runtime communicates with the code generated
63    within the function.  We then expand the GIMPLE_EH_DISPATCH statements
64    to a switch or conditional branches that use the argument provided by
65    the runtime (__builtin_eh_filter) and the set of integers we computed
66    in assign_filter_values.
67 
68    During pass_lower_resx (tree-eh.c), which is run near the end
69    of optimization, we expand RESX statements.  If the eh region
70    that is outer to the RESX statement is a MUST_NOT_THROW, then
71    the RESX expands to some form of abort statement.  If the eh
72    region that is outer to the RESX statement is within the current
73    function, then the RESX expands to a bookkeeping call
74    (__builtin_eh_copy_values) and a goto.  Otherwise, the next
75    handler for the exception must be within a function somewhere
76    up the call chain, so we call back into the exception runtime
77    (__builtin_unwind_resume).
78 
79    During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
80    that create an rtl to eh_region mapping that corresponds to the
81    gimple to eh_region mapping that had been recorded in the
82    THROW_STMT_TABLE.
83 
84    During pass_rtl_eh (except.c), we generate the real landing pads
85    to which the runtime will actually transfer control.  These new
86    landing pads perform whatever bookkeeping is needed by the target
87    backend in order to resume execution within the current function.
88    Each of these new landing pads falls through into the post_landing_pad
89    label which had been used within the CFG up to this point.  All
90    exception edges within the CFG are redirected to the new landing pads.
91    If the target uses setjmp to implement exceptions, the various extra
92    calls into the runtime to register and unregister the current stack
93    frame are emitted at this time.
94 
95    During pass_convert_to_eh_region_ranges (except.c), we transform
96    the REG_EH_REGION notes attached to individual insns into
97    non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
98    and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
99    same associated action within the exception region tree, meaning
100    that (1) the exception is caught by the same landing pad within the
101    current function, (2) the exception is blocked by the runtime with
102    a MUST_NOT_THROW region, or (3) the exception is not handled at all
103    within the current function.
104 
105    Finally, during assembly generation, we call
106    output_function_exception_table (except.c) to emit the tables with
107    which the exception runtime can determine if a given stack frame
108    handles a given exception, and if so what filter value to provide
109    to the function when the non-local control transfer is effected.
110    If the target uses dwarf2 unwinding to implement exceptions, then
111    output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
112 
113 
114 #include "config.h"
115 #include "system.h"
116 #include "coretypes.h"
117 #include "tm.h"
118 #include "rtl.h"
119 #include "tree.h"
120 #include "flags.h"
121 #include "function.h"
122 #include "expr.h"
123 #include "libfuncs.h"
124 #include "insn-config.h"
125 #include "except.h"
126 #include "integrate.h"
127 #include "hard-reg-set.h"
128 #include "basic-block.h"
129 #include "output.h"
130 #include "dwarf2asm.h"
131 #include "dwarf2out.h"
132 #include "dwarf2.h"
133 #include "toplev.h"
134 #include "hashtab.h"
135 #include "intl.h"
136 #include "ggc.h"
137 #include "tm_p.h"
138 #include "target.h"
139 #include "common/common-target.h"
140 #include "langhooks.h"
141 #include "cgraph.h"
142 #include "diagnostic.h"
143 #include "tree-pretty-print.h"
144 #include "tree-pass.h"
145 #include "timevar.h"
146 #include "tree-flow.h"
147 
148 /* Provide defaults for stuff that may not be defined when using
149    sjlj exceptions.  */
150 #ifndef EH_RETURN_DATA_REGNO
151 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
152 #endif
153 
154 static GTY(()) int call_site_base;
155 static GTY ((param_is (union tree_node)))
156   htab_t type_to_runtime_map;
157 
158 /* Describe the SjLj_Function_Context structure.  */
159 static GTY(()) tree sjlj_fc_type_node;
160 static int sjlj_fc_call_site_ofs;
161 static int sjlj_fc_data_ofs;
162 static int sjlj_fc_personality_ofs;
163 static int sjlj_fc_lsda_ofs;
164 static int sjlj_fc_jbuf_ofs;
165 
166 
/* One entry of the call-site table emitted into the LSDA: the landing
   pad to transfer to (NULL if the call site cannot throw locally) and
   an index into the action record table.  GC-tracked via GTY.  */
struct GTY(()) call_site_record_d
{
  rtx landing_pad;	/* Label rtx of the landing pad, or NULL.  */
  int action;		/* 1-based action record offset; 0 = no action.  */
};
172 
173 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
174 					   eh_landing_pad *);
175 
176 static int t2r_eq (const void *, const void *);
177 static hashval_t t2r_hash (const void *);
178 
179 static int ttypes_filter_eq (const void *, const void *);
180 static hashval_t ttypes_filter_hash (const void *);
181 static int ehspec_filter_eq (const void *, const void *);
182 static hashval_t ehspec_filter_hash (const void *);
183 static int add_ttypes_entry (htab_t, tree);
184 static int add_ehspec_entry (htab_t, htab_t, tree);
185 static void dw2_build_landing_pads (void);
186 
187 static int action_record_eq (const void *, const void *);
188 static hashval_t action_record_hash (const void *);
189 static int add_action_record (htab_t, int, int);
190 static int collect_one_action_chain (htab_t, eh_region);
191 static int add_call_site (rtx, int, int);
192 
193 static void push_uleb128 (VEC (uchar, gc) **, unsigned int);
194 static void push_sleb128 (VEC (uchar, gc) **, int);
195 #ifndef HAVE_AS_LEB128
196 static int dw2_size_of_call_site_table (int);
197 static int sjlj_size_of_call_site_table (void);
198 #endif
199 static void dw2_output_call_site_table (int, int);
200 static void sjlj_output_call_site_table (void);
201 
202 
/* One-time initialization of the exception handling machinery:
   allocate the type-to-runtime-type map and, when the target unwinds
   with setjmp/longjmp, build and lay out the SjLj_Function_Context
   record type and cache its field offsets for use from rtl.  */

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  /* Maps a front-end type to its runtime representation (see
     add_type_for_runtime / lookup_type_for_runtime below).  */
  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      /* struct SjLj_Function_Context *__prev;  */
      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* int __call_site;  */
      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* __data: an array of four unwind words.  */
      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* void *__personality;  */
      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* void *__lsda;  */
      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

      /* Choose the size of the __jbuf jump buffer field.  */
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = size_int (5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment the runtime's jmp_buf requires.
	 Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields together and lay out the record type.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
301 
/* Allocate per-function EH state for the current function (CFUN).
   Both the region and landing pad arrays reserve index 0 so that a
   zero region/landing-pad number can mean "none".  */

void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared_eh_status ();

  /* Make sure zero'th entries are used.  */
  VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
  VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
}
311 
312 /* Routines to generate the exception tree somewhat directly.
313    These are used from tree-eh.c when processing exception related
314    nodes during tree optimization.  */
315 
316 static eh_region
317 gen_eh_region (enum eh_region_type type, eh_region outer)
318 {
319   eh_region new_eh;
320 
321   /* Insert a new blank region as a leaf in the tree.  */
322   new_eh = ggc_alloc_cleared_eh_region_d ();
323   new_eh->type = type;
324   new_eh->outer = outer;
325   if (outer)
326     {
327       new_eh->next_peer = outer->inner;
328       outer->inner = new_eh;
329     }
330   else
331     {
332       new_eh->next_peer = cfun->eh->region_tree;
333       cfun->eh->region_tree = new_eh;
334     }
335 
336   new_eh->index = VEC_length (eh_region, cfun->eh->region_array);
337   VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh);
338 
339   /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
340   if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
341     new_eh->use_cxa_end_cleanup = true;
342 
343   return new_eh;
344 }
345 
/* Create an ERT_CLEANUP region nested immediately inside OUTER.  */

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}
351 
/* Create an ERT_TRY region nested immediately inside OUTER.  Catch
   handlers are attached afterwards with gen_eh_region_catch.  */

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
357 
358 eh_catch
359 gen_eh_region_catch (eh_region t, tree type_or_list)
360 {
361   eh_catch c, l;
362   tree type_list, type_node;
363 
364   gcc_assert (t->type == ERT_TRY);
365 
366   /* Ensure to always end up with a type list to normalize further
367      processing, then register each type against the runtime types map.  */
368   type_list = type_or_list;
369   if (type_or_list)
370     {
371       if (TREE_CODE (type_or_list) != TREE_LIST)
372 	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
373 
374       type_node = type_list;
375       for (; type_node; type_node = TREE_CHAIN (type_node))
376 	add_type_for_runtime (TREE_VALUE (type_node));
377     }
378 
379   c = ggc_alloc_cleared_eh_catch_d ();
380   c->type_list = type_list;
381   l = t->u.eh_try.last_catch;
382   c->prev_catch = l;
383   if (l)
384     l->next_catch = c;
385   else
386     t->u.eh_try.first_catch = c;
387   t->u.eh_try.last_catch = c;
388 
389   return c;
390 }
391 
392 eh_region
393 gen_eh_region_allowed (eh_region outer, tree allowed)
394 {
395   eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
396   region->u.allowed.type_list = allowed;
397 
398   for (; allowed ; allowed = TREE_CHAIN (allowed))
399     add_type_for_runtime (TREE_VALUE (allowed));
400 
401   return region;
402 }
403 
/* Create an ERT_MUST_NOT_THROW region nested immediately inside OUTER.  */

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
409 
410 eh_landing_pad
411 gen_eh_landing_pad (eh_region region)
412 {
413   eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d ();
414 
415   lp->next_lp = region->landing_pads;
416   lp->region = region;
417   lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array);
418   region->landing_pads = lp;
419 
420   VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp);
421 
422   return lp;
423 }
424 
/* Return the EH region with index I in function IFUN (NULL for a
   deleted region; index 0 is always NULL by construction).  */

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return VEC_index (eh_region, ifun->eh->region_array, i);
}
430 
/* Return the EH region with index I in the current function.  */

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}
436 
/* Return the landing pad with index I in function IFUN (NULL for a
   deleted pad; index 0 is always NULL by construction).  */

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
}
442 
/* Return the landing pad with index I in the current function.  */

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}
448 
449 eh_region
450 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
451 {
452   if (i < 0)
453     return VEC_index (eh_region, ifun->eh->region_array, -i);
454   else if (i == 0)
455     return NULL;
456   else
457     {
458       eh_landing_pad lp;
459       lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
460       return lp->region;
461     }
462 }
463 
/* As get_eh_region_from_lp_number_fn, for the current function.  */

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
469 
/* Returns true if the current function has exception handling regions,
   i.e. its EH region tree is non-empty.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
477 
478 /* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
479    Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */
480 
/* State threaded through duplicate_eh_regions_1: the caller's label
   remapping callback and its closure, plus the map being built from
   old eh_region/eh_landing_pad pointers to their copies.  */
struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;	/* Callback to remap labels.  */
  void *label_map_data;			/* Opaque data for LABEL_MAP.  */
  struct pointer_map_t *eh_map;		/* old region/lp -> new copy.  */
};
487 
/* Recursively copy the EH region subtree rooted at OLD_R into the
   current function, rooted below OUTER.  Records old->new mappings for
   both regions and (used) landing pads in DATA->eh_map, and remaps all
   labels through DATA->label_map.  */

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;
  void **slot;

  /* Create the copy and record the old->new mapping; each old region
     must be visited at most once.  */
  new_r = gen_eh_region (old_r->type, outer);
  slot = pointer_map_insert (data->eh_map, (void *)old_r);
  gcc_assert (*slot == NULL);
  *slot = (void *)new_r;

  /* Copy the per-kind payload.  */
  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	    = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw = old_r->u.must_not_throw;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      slot = pointer_map_insert (data->eh_map, (void *)old_lp);
      gcc_assert (*slot == NULL);
      *slot = (void *)new_lp;

      /* Remap the post-landing-pad label and point it back at the
	 copy's landing pad number.  */
      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  /* Recurse into the children, rooting them under the new copy.  */
  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}
556 
/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  The caller owns (and must destroy) the map.  */

struct pointer_map_t *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = pointer_map_create ();

  /* OUTER_LP is a signed lp number in the current function; decode it
     to the region the copies hang below.  */
  outer_region = get_eh_region_from_lp_number (outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}
598 
599 /* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */
600 
601 eh_region
602 eh_region_outermost (struct function *ifun, eh_region region_a,
603 		     eh_region region_b)
604 {
605   sbitmap b_outer;
606 
607   gcc_assert (ifun->eh->region_array);
608   gcc_assert (ifun->eh->region_tree);
609 
610   b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
611   sbitmap_zero (b_outer);
612 
613   do
614     {
615       SET_BIT (b_outer, region_b->index);
616       region_b = region_b->outer;
617     }
618   while (region_b);
619 
620   do
621     {
622       if (TEST_BIT (b_outer, region_a->index))
623 	break;
624       region_a = region_a->outer;
625     }
626   while (region_a);
627 
628   sbitmap_free (b_outer);
629   return region_a;
630 }
631 
/* Equality callback for TYPE_TO_RUNTIME_MAP.  PENTRY is a stored
   TREE_LIST whose TREE_PURPOSE is the source type; PDATA is the type
   being looked up.  */

static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}
640 
/* Hash callback for TYPE_TO_RUNTIME_MAP: hash the source type stored
   in the entry's TREE_PURPOSE.  */

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}
647 
648 void
649 add_type_for_runtime (tree type)
650 {
651   tree *slot;
652 
653   /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
654   if (TREE_CODE (type) == NOP_EXPR)
655     return;
656 
657   slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
658 					    TREE_HASH (type), INSERT);
659   if (*slot == NULL)
660     {
661       tree runtime = lang_hooks.eh_runtime_type (type);
662       *slot = tree_cons (type, runtime, NULL_TREE);
663     }
664 }
665 
/* Return the runtime representation previously registered for TYPE by
   add_type_for_runtime.  TYPE must already be in the map.  */

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
681 
682 
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;	/* The caught type, or an exception-spec TREE_LIST.  */
  int filter;	/* Assigned filter value (positive for types, negative
		   byte offsets for exception specifications).  */
};
689 
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.
   Identity comparison suffices since types are interned.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}
702 
/* Hash callback for the @TTypes table: hash the stored type.  */

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}
709 
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.
   Element-wise list comparison, so equal specs share a filter.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}
723 
/* Hash function for exception specification lists: fold each member
   type's hash into the accumulator with a left-rotate-by-5 mix.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
737 
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used: a new
   value is allocated on first sight of TYPE, the cached value on
   subsequent calls.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
    }

  return n->filter;
}
763 
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used; equal
   specifications (see ehspec_filter_eq) share a single entry.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe with a stack dummy so we only allocate on a miss.  */
  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      /* Capture the current buffer length BEFORE pushing, since the
	 filter encodes the entry's starting position.  */
      if (targetm.arm_eabi_unwinder)
	len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi);
      else
	len = VEC_length (uchar, cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi,
			   TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      /* Terminate the encoded list.  */
      if (targetm.arm_eabi_unwinder)
	VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
    }

  return n->filter;
}
815 
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;
  eh_region r;
  eh_catch c;

  /* Fresh per-function tables; sizes are just initial capacities.  */
  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64);
  else
    cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64);

  /* Temporary hash tables to dedup filter values; freed below.  */
  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  /* Index 0 is the reserved NULL slot; start at 1.  */
  for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
893 
/* Emit SEQ into a new basic block just before INSN (which is assumed
   to be the first instruction of some existing BB) and return the
   newly produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  /* Don't include a trailing barrier inside the new block.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
921 
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region: an optional receiver pattern, then moves of
   the runtime-provided exception pointer and filter value into the
   region's pseudos (when the region uses them).  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  /* EH_RETURN_DATA_REGNO (0) carries the exception pointer,
     EH_RETURN_DATA_REGNO (1) the selected filter value.  */
  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}
949 
950 /* Expand the extra code needed at landing pads for dwarf2 unwinding.  */
951 
952 static void
953 dw2_build_landing_pads (void)
954 {
955   int i;
956   eh_landing_pad lp;
957   int e_flags = EDGE_FALLTHRU;
958 
959   /* If we're going to partition blocks, we need to be able to add
960      new landing pads later, which means that we need to hold on to
961      the post-landing-pad block.  Prevent it from being merged away.
962      We'll remove this bit after partitioning.  */
963   if (flag_reorder_blocks_and_partition)
964     e_flags |= EDGE_PRESERVE;
965 
966   for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
967     {
968       basic_block bb;
969       rtx seq;
970       edge e;
971 
972       if (lp == NULL || lp->post_landing_pad == NULL)
973 	continue;
974 
975       start_sequence ();
976 
977       lp->landing_pad = gen_label_rtx ();
978       emit_label (lp->landing_pad);
979       LABEL_PRESERVE_P (lp->landing_pad) = 1;
980 
981       expand_dw2_landing_pad_for_region (lp->region);
982 
983       seq = get_insns ();
984       end_sequence ();
985 
986       bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
987       e = make_edge (bb, bb->next_bb, e_flags);
988       e->count = bb->count;
989       e->probability = REG_BR_PROB_BASE;
990     }
991 }
992 
993 
/* Indexed by landing pad number: the call-site index assigned to that
   pad by sjlj_assign_call_site_values and read back by
   sjlj_mark_call_sites.  Grown and freed in sjlj_build_landing_pads.  */
static VEC (int, heap) *sjlj_lp_call_site_index;
995 
/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  Returns the number of dispatch
   indices handed out, i.e. the number of reachable landing pads.  */

static int
sjlj_assign_call_site_values (void)
{
  htab_t ar_hash;
  int i, disp_index;
  eh_landing_pad lp;

  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	VEC_replace (int, sjlj_lp_call_site_index, i, call_site);

	disp_index++;
      }

  htab_delete (ar_hash);

  return disp_index;
}
1040 
/* Emit code to record the current call-site index before every
   insn that can throw.  The index is stored into the call_site slot
   of the function's SjLj unwind context (crtl->eh.sjlj_fc) so the
   dispatcher knows where the function was when an exception arrived.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index);
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      /* Skip the store when the previous throwing insn in this extended
	 basic block already recorded the same index.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
1105 
/* Construct the SjLj_Function_Context: fill in the personality, LSDA
   and (when DISPATCH_LABEL is non-null) jmpbuf fields, then register
   the context with the unwind runtime.  The emitted sequence is placed
   at function entry.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  /* Point the context at this function's LSDA label, or store null
     when no LSDA is needed.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      /* Call the real setjmp and branch to the dispatcher on a
	 non-zero (i.e. second) return.  */
      rtx x, last;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label);
      last = get_last_insn ();
      if (JUMP_P (last) && any_condjump_p (last))
	{
	  /* Mark the branch to the dispatcher as very unlikely.  */
	  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	  add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
	}
#else
      expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  /* Find NOTE_INSN_FUNCTION_BEG; if no basic-block note precedes it,
     the sequence can go on the entry edge, otherwise it must be
     emitted after the note.  */
  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
1190 
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  /* Just remember the insn; sjlj_emit_function_exit emits the call.  */
  crtl->eh.sjlj_exit_after = after;
}
1199 
1200 static void
1201 sjlj_emit_function_exit (void)
1202 {
1203   rtx seq, insn;
1204 
1205   start_sequence ();
1206 
1207   emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1208 		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
1209 
1210   seq = get_insns ();
1211   end_sequence ();
1212 
1213   /* ??? Really this can be done in any block at loop level 0 that
1214      post-dominates all can_throw_internal instructions.  This is
1215      the last possible moment.  */
1216 
1217   insn = crtl->eh.sjlj_exit_after;
1218   if (LABEL_P (insn))
1219     insn = NEXT_INSN (insn);
1220 
1221   emit_insn_after (seq, insn);
1222 }
1223 
/* Emit the SjLj dispatch code at DISPATCH_LABEL: reload the exception
   pointer and filter values from the function context, then branch to
   the landing pad selected by the recorded call-site index.
   NUM_DISPATCH is the number of reachable landing pads, as returned by
   sjlj_assign_call_site_values.  */
static void
sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, seq, fc, before, exc_ptr_reg, filter_reg;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  gimple switch_stmt;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context,
     converting from the unwind word mode where necessary.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  switch_stmt = NULL;
  if (num_dispatch > 1)
    {
      tree disp;

      /* Switch on the call-site index stored in the context.  */
      mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      disp = make_tree (integer_type_node, mem);

      switch_stmt = gimple_build_switch_nlabels (num_dispatch, disp, NULL);
    }

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx seq2, label;

	start_sequence ();

	/* All pads share the single dispatch label under SjLj.  */
	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    /* Add a case label routing this dispatch index here.  */
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    gimple_switch_set_label (switch_stmt, disp_index, case_elt);

	    label = label_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	/* Hand the reloaded values to the region's pseudos.  */
	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      /* The default of the switch traps: no other index is possible.  */
      expand_case (switch_stmt);
      expand_builtin_trap ();
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
1357 
1358 static void
1359 sjlj_build_landing_pads (void)
1360 {
1361   int num_dispatch;
1362 
1363   num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array);
1364   if (num_dispatch == 0)
1365     return;
1366   VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch);
1367 
1368   num_dispatch = sjlj_assign_call_site_values ();
1369   if (num_dispatch > 0)
1370     {
1371       rtx dispatch_label = gen_label_rtx ();
1372       int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1373 					TYPE_MODE (sjlj_fc_type_node),
1374 					TYPE_ALIGN (sjlj_fc_type_node));
1375       crtl->eh.sjlj_fc
1376 	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1377 			      int_size_in_bytes (sjlj_fc_type_node),
1378 			      align);
1379 
1380       sjlj_mark_call_sites ();
1381       sjlj_emit_function_enter (dispatch_label);
1382       sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
1383       sjlj_emit_function_exit ();
1384     }
1385 
1386   /* If we do not have any landing pads, we may still need to register a
1387      personality routine and (empty) LSDA to handle must-not-throw regions.  */
1388   else if (function_needs_eh_personality (cfun) != eh_personality_none)
1389     {
1390       int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1391 					TYPE_MODE (sjlj_fc_type_node),
1392 					TYPE_ALIGN (sjlj_fc_type_node));
1393       crtl->eh.sjlj_fc
1394 	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1395 			      int_size_in_bytes (sjlj_fc_type_node),
1396 			      align);
1397 
1398       sjlj_mark_call_sites ();
1399       sjlj_emit_function_enter (NULL_RTX);
1400       sjlj_emit_function_exit ();
1401     }
1402 
1403   VEC_free (int, heap, sjlj_lp_call_site_index);
1404 }
1405 
/* After initial rtl generation, call back to finish generating
   exception support code.  Builds the landing pads (SjLj or dwarf2),
   commits any pending edge insertions, and redirects EH edges from the
   post-landing-pad blocks to the freshly created landing pads.  */

static void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  /* SjLj may have queued the context setup on the entry edge
     (see sjlj_emit_function_enter); flush pending insertions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB (bb)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  /* EH edges leaving a call are abnormal-call edges; other EH
	     edges are plain abnormal.  */
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
1456 
1457 static bool
1458 gate_handle_eh (void)
1459 {
1460   /* Nothing to do if no regions created.  */
1461   return cfun->eh->region_tree != NULL;
1462 }
1463 
/* Complete generation of exception handling code.  Pass entry point;
   always returns 0 (no extra TODO flags).  */
static unsigned int
rest_of_handle_eh (void)
{
  finish_eh_generation ();
  /* Tidy the CFG without deleting the insns just emitted.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}
1472 
/* Pass descriptor for the RTL EH lowering pass implemented by
   gate_handle_eh / rest_of_handle_eh above.  */
struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "rtl_eh",                             /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,			/* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
1491 
1492 /* This section handles removing dead code for flow.  */
1493 
1494 void
1495 remove_eh_landing_pad (eh_landing_pad lp)
1496 {
1497   eh_landing_pad *pp;
1498 
1499   for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1500     continue;
1501   *pp = lp->next_lp;
1502 
1503   if (lp->post_landing_pad)
1504     EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1505   VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
1506 }
1507 
/* Splice REGION from the region tree.  Its landing pads are discarded
   and its immediate children are promoted into its parent's child
   list.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;
  eh_landing_pad lp;

  /* Drop all of REGION's landing pads from the per-function array and
     clear the label back-pointers.  */
  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
    }

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  /* Find the link pointing at REGION in its parent's child list.  */
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  if (region->inner)
    {
      /* Splice REGION's children into its place, reparenting each one
	 and leaving PP pointing at the last child's next_peer link.  */
      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL);
}
1545 
1546 /* Invokes CALLBACK for every exception handler landing pad label.
1547    Only used by reload hackery; should not be used by new code.  */
1548 
1549 void
1550 for_each_eh_label (void (*callback) (rtx))
1551 {
1552   eh_landing_pad lp;
1553   int i;
1554 
1555   for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
1556     {
1557       if (lp)
1558 	{
1559 	  rtx lab = lp->landing_pad;
1560 	  if (lab && LABEL_P (lab))
1561 	    (*callback) (lab);
1562 	}
1563     }
1564 }
1565 
1566 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1567    call insn.
1568 
1569    At the gimple level, we use LP_NR
1570        > 0 : The statement transfers to landing pad LP_NR
1571        = 0 : The statement is outside any EH region
1572        < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1573 
1574    At the rtl level, we use LP_NR
1575        > 0 : The insn transfers to landing pad LP_NR
1576        = 0 : The insn cannot throw
1577        < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1578        = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1579        missing note: The insn is outside any EH region.
1580 
1581   ??? This difference probably ought to be avoided.  We could stand
1582   to record nothrow for arbitrary gimple statements, and so avoid
1583   some moderately complex lookups in stmt_could_throw_p.  Perhaps
1584   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
1585   no-nonlocal-goto property should be recorded elsewhere as a bit
1586   on the call_insn directly.  Perhaps we should make more use of
1587   attaching the trees to call_insns (reachable via symbol_ref in
1588   direct call cases) and just pull the data out of the trees.  */
1589 
1590 void
1591 make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
1592 {
1593   rtx value;
1594   if (ecf_flags & ECF_NOTHROW)
1595     value = const0_rtx;
1596   else if (lp_nr != 0)
1597     value = GEN_INT (lp_nr);
1598   else
1599     return;
1600   add_reg_note (insn, REG_EH_REGION, value);
1601 }
1602 
1603 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1604    nor perform a non-local goto.  Replace the region note if it
1605    already exists.  */
1606 
1607 void
1608 make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
1609 {
1610   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1611   rtx intmin = GEN_INT (INT_MIN);
1612 
1613   if (note != 0)
1614     XEXP (note, 0) = intmin;
1615   else
1616     add_reg_note (insn, REG_EH_REGION, intmin);
1617 }
1618 
1619 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1620    to the contrary.  */
1621 
1622 bool
1623 insn_could_throw_p (const_rtx insn)
1624 {
1625   if (!flag_exceptions)
1626     return false;
1627   if (CALL_P (insn))
1628     return true;
1629   if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1630     return may_trap_p (PATTERN (insn));
1631   return false;
1632 }
1633 
1634 /* Copy an REG_EH_REGION note to each insn that might throw beginning
1635    at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
1636    to look for a note, or the note itself.  */
1637 
1638 void
1639 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
1640 {
1641   rtx insn, note = note_or_insn;
1642 
1643   if (INSN_P (note_or_insn))
1644     {
1645       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1646       if (note == NULL)
1647 	return;
1648     }
1649   note = XEXP (note, 0);
1650 
1651   for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1652     if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1653         && insn_could_throw_p (insn))
1654       add_reg_note (insn, REG_EH_REGION, note);
1655 }
1656 
1657 /* Likewise, but iterate backward.  */
1658 
1659 void
1660 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
1661 {
1662   rtx insn, note = note_or_insn;
1663 
1664   if (INSN_P (note_or_insn))
1665     {
1666       note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1667       if (note == NULL)
1668 	return;
1669     }
1670   note = XEXP (note, 0);
1671 
1672   for (insn = last; insn != first; insn = PREV_INSN (insn))
1673     if (insn_could_throw_p (insn))
1674       add_reg_note (insn, REG_EH_REGION, note);
1675 }
1676 
1677 
/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  On return, *PR is the EH region (or NULL) and
   *PLP the landing pad (or NULL) associated with INSN.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  /* For a delay-slot SEQUENCE, the note lives on the first member.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* No note: the insn is nothrow exactly when it could not throw
	 anyway.  */
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  /* Note values 0 and INT_MIN both mean "cannot throw"; see the
     commentary before make_reg_eh_region_note.  */
  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  /* Negative values name a MUST_NOT_THROW region directly; positive
     values name a landing pad, which yields its region.  */
  if (lp_nr < 0)
    r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr);
  else
    {
      lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr);
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}
1725 
1726 /* Return the landing pad to which INSN may go, or NULL if it does not
1727    have a reachable landing pad within this function.  */
1728 
1729 eh_landing_pad
1730 get_eh_landing_pad_from_rtx (const_rtx insn)
1731 {
1732   eh_landing_pad lp;
1733   eh_region r;
1734 
1735   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1736   return lp;
1737 }
1738 
1739 /* Return the region to which INSN may go, or NULL if it does not
1740    have a reachable region within this function.  */
1741 
1742 eh_region
1743 get_eh_region_from_rtx (const_rtx insn)
1744 {
1745   eh_landing_pad lp;
1746   eh_region r;
1747 
1748   get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1749   return r;
1750 }
1751 
1752 /* Return true if INSN throws and is caught by something in this function.  */
1753 
1754 bool
1755 can_throw_internal (const_rtx insn)
1756 {
1757   return get_eh_landing_pad_from_rtx (insn) != NULL;
1758 }
1759 
1760 /* Return true if INSN throws and escapes from the current function.  */
1761 
1762 bool
1763 can_throw_external (const_rtx insn)
1764 {
1765   eh_landing_pad lp;
1766   eh_region r;
1767   bool nothrow;
1768 
1769   if (! INSN_P (insn))
1770     return false;
1771 
1772   if (NONJUMP_INSN_P (insn)
1773       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1774     {
1775       rtx seq = PATTERN (insn);
1776       int i, n = XVECLEN (seq, 0);
1777 
1778       for (i = 0; i < n; i++)
1779 	if (can_throw_external (XVECEXP (seq, 0, i)))
1780 	  return true;
1781 
1782       return false;
1783     }
1784 
1785   nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1786 
1787   /* If we can't throw, we obviously can't throw external.  */
1788   if (nothrow)
1789     return false;
1790 
1791   /* If we have an internal landing pad, then we're not external.  */
1792   if (lp != NULL)
1793     return false;
1794 
1795   /* If we're not within an EH region, then we are external.  */
1796   if (r == NULL)
1797     return true;
1798 
1799   /* The only thing that ought to be left is MUST_NOT_THROW regions,
1800      which don't always have landing pads.  */
1801   gcc_assert (r->type == ERT_MUST_NOT_THROW);
1802   return false;
1803 }
1804 
1805 /* Return true if INSN cannot throw at all.  */
1806 
1807 bool
1808 insn_nothrow_p (const_rtx insn)
1809 {
1810   eh_landing_pad lp;
1811   eh_region r;
1812 
1813   if (! INSN_P (insn))
1814     return true;
1815 
1816   if (NONJUMP_INSN_P (insn)
1817       && GET_CODE (PATTERN (insn)) == SEQUENCE)
1818     {
1819       rtx seq = PATTERN (insn);
1820       int i, n = XVECLEN (seq, 0);
1821 
1822       for (i = 0; i < n; i++)
1823 	if (!insn_nothrow_p (XVECEXP (seq, 0, i)))
1824 	  return false;
1825 
1826       return true;
1827     }
1828 
1829   return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1830 }
1831 
1832 /* Return true if INSN can perform a non-local goto.  */
1833 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */
1834 
1835 bool
1836 can_nonlocal_goto (const_rtx insn)
1837 {
1838   if (nonlocal_goto_handler_labels && CALL_P (insn))
1839     {
1840       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1841       if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1842 	return true;
1843     }
1844   return false;
1845 }
1846 
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  Pass entry
   point; always returns 0.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  /* Scan the insn stream; one externally-throwing non-sibcall insn is
     enough to settle both flags, so we can stop early.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  /* Likewise for insns queued in the epilogue delay list.  */
  for (insn = crtl->epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
        crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }
  /* When this body is known to be the one that will be used, propagate
     the nothrow property to the cgraph node and its call edges.  */
  if (crtl->nothrow
      && (cgraph_function_body_availability (cgraph_get_node
					     (current_function_decl))
          >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_get_node (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
        e->can_throw_external = false;
      cgraph_set_nothrow_flag (node, true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
1914 
/* Pass descriptor for set_nothrow_function_flags above; runs
   unconditionally (no gate).  */
struct rtl_opt_pass pass_set_nothrow_function_flags =
{
 {
  RTL_PASS,
  "nothrow",                            /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
1933 
1934 
1935 /* Various hooks for unwind library.  */
1936 
1937 /* Expand the EH support builtin functions:
1938    __builtin_eh_pointer and __builtin_eh_filter.  */
1939 
1940 static eh_region
1941 expand_builtin_eh_common (tree region_nr_t)
1942 {
1943   HOST_WIDE_INT region_nr;
1944   eh_region region;
1945 
1946   gcc_assert (host_integerp (region_nr_t, 0));
1947   region_nr = tree_low_cst (region_nr_t, 0);
1948 
1949   region = VEC_index (eh_region, cfun->eh->region_array, region_nr);
1950 
1951   /* ??? We shouldn't have been able to delete a eh region without
1952      deleting all the code that depended on it.  */
1953   gcc_assert (region != NULL);
1954 
1955   return region;
1956 }
1957 
1958 /* Expand to the exc_ptr value from the given eh region.  */
1959 
1960 rtx
1961 expand_builtin_eh_pointer (tree exp)
1962 {
1963   eh_region region
1964     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1965   if (region->exc_ptr_reg == NULL)
1966     region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1967   return region->exc_ptr_reg;
1968 }
1969 
1970 /* Expand to the filter value from the given eh region.  */
1971 
1972 rtx
1973 expand_builtin_eh_filter (tree exp)
1974 {
1975   eh_region region
1976     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1977   if (region->filter_reg == NULL)
1978     region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
1979   return region->filter_reg;
1980 }
1981 
1982 /* Copy the exc_ptr and filter values from one landing pad's registers
1983    to another.  This is used to inline the resx statement.  */
1984 
1985 rtx
1986 expand_builtin_eh_copy_values (tree exp)
1987 {
1988   eh_region dst
1989     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
1990   eh_region src
1991     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
1992   enum machine_mode fmode = targetm.eh_return_filter_mode ();
1993 
1994   if (dst->exc_ptr_reg == NULL)
1995     dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1996   if (src->exc_ptr_reg == NULL)
1997     src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
1998 
1999   if (dst->filter_reg == NULL)
2000     dst->filter_reg = gen_reg_rtx (fmode);
2001   if (src->filter_reg == NULL)
2002     src->filter_reg = gen_reg_rtx (fmode);
2003 
2004   emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2005   emit_move_insn (dst->filter_reg, src->filter_reg);
2006 
2007   return const0_rtx;
2008 }
2009 
2010 /* Do any necessary initialization to access arbitrary stack frames.
2011    On the SPARC, this means flushing the register windows.  */
2012 
void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

  /* Give the target a chance to do any extra frame setup it needs
     (per the header comment: e.g. flushing register windows).  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2024 
2025 /* Map a non-negative number to an eh return data register number; expands
2026    to -1 if no return data register is associated with the input number.
2027    At least the inputs 0 and 1 must be mapped; the target may provide more.  */
2028 
rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  /* The argument must be a literal constant; report and return -1
     otherwise, per the contract in the comment above.  */
  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used by the
     unwind information.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
2054 
2055 /* Given a value extracted from the return address register or stack slot,
2056    return the actual address encoded in that value.  */
2057 
rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Bring the value into Pmode if it was expanded in some other
     (non-VOID) mode.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2085 
2086 /* Given an actual address in addr_tree, do any necessary encoding
2087    and return the value to be stored in the return address register or
2088    stack slot so the epilogue will return to that address.  */
2089 
rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

  /* Apply the inverse of the RETURN_ADDR_OFFSET adjustment made by
     expand_builtin_extract_return_addr above.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2104 
2105 /* Set up the epilogue with the magic bits we'll need to return to the
2106    exception handler.  */
2107 
void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

  /* Evaluate the stack adjustment and cache it in the per-function
     pseudo crtl->eh.ehr_stackadj, creating the pseudo on first use.  */
#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  /* Likewise cache the handler address in crtl->eh.ehr_handler.  */
  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  /* Jump to the single shared label at which expand_eh_return will
     later emit the special return sequence.  */
  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
2136 
2137 /* Expand __builtin_eh_return.  This exit path from the function loads up
2138    the eh return data registers, adjusts the stack, and branches to a
2139    given PC other than the normal return address.  */
2140 
void
expand_eh_return (void)
{
  rtx around_label;

  /* Nothing to do if no __builtin_eh_return was expanded in this
     function (expand_builtin_eh_return creates ehr_label).  */
  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

  /* On the normal (non-EH) return path the stack adjustment is zero.  */
#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Skip the EH-return sequence on the normal path.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

  /* Prefer the target's eh_return insn; otherwise fall back to storing
     the handler address into EH_RETURN_HANDLER_RTX.  Targets providing
     neither cannot support this builtin.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
2180 
2181 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2182    POINTERS_EXTEND_UNSIGNED and return it.  */
2183 
rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  /* Widen (or narrow) to the mode the unwinder uses for words.  */
  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
2200 
2201 /* In the following functions, we represent entries in the action table
2202    as 1-based indices.  Special cases are:
2203 
2204 	 0:	null action record, non-null landing pad; implies cleanups
2205 	-1:	null action record, null landing pad; implies no action
2206 	-2:	no call-site entry; implies must_not_throw
2207 	-3:	we have yet to process outer regions
2208 
2209    Further, no special cases apply to the "next" field of the record.
2210    For next, 0 means end of list.  */
2211 
/* One entry of the hashed action table; see the index conventions
   in the comment above.  */
struct action_record
{
  int offset;  /* 1-based byte offset of this record in
		  crtl->eh.action_record_data (see add_action_record).  */
  int filter;  /* The filter value stored in the record.  */
  int next;    /* 1-based index of the next record, or 0 for end of list.  */
};
2218 
2219 static int
2220 action_record_eq (const void *pentry, const void *pdata)
2221 {
2222   const struct action_record *entry = (const struct action_record *) pentry;
2223   const struct action_record *data = (const struct action_record *) pdata;
2224   return entry->filter == data->filter && entry->next == data->next;
2225 }
2226 
2227 static hashval_t
2228 action_record_hash (const void *pentry)
2229 {
2230   const struct action_record *entry = (const struct action_record *) pentry;
2231   return entry->next * 1009 + entry->filter;
2232 }
2233 
/* Add an action record for (FILTER, NEXT) to the action table,
   reusing an already-emitted identical record when AR_HASH has one.
   Return the record's 1-based offset within the table.  */

static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      /* Note: the displacement must be computed AFTER the filter has
	 been pushed, since it is relative to the current position.  */
      if (next)
	next -= VEC_length (uchar, crtl->eh.action_record_data) + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
2264 
/* Build the chain of action records needed when an exception escapes
   from REGION, interning records through AR_HASH, and return its
   1-based index in the action table -- or one of the special values
   0 / -1 / -2 / -3 documented in the comment above.  */

static int
collect_one_action_chain (htab_t ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
	eh_region r;
	/* A cleanup adds a zero filter to the beginning of the chain, but
	   there are special cases to look out for.  If there are *only*
	   cleanups along a path, then it compresses to a zero action.
	   Further, if there are multiple cleanups along a path, we only
	   need to represent one of them, as that is enough to trigger
	   entry to the landing pad at runtime.  */
	next = collect_one_action_chain (ar_hash, region->outer);
	if (next <= 0)
	  return 0;
	for (r = region->outer; r ; r = r->outer)
	  if (r->type == ERT_CLEANUP)
	    return next;
	return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
	eh_catch c;

	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
	  {
	    if (c->type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record for
		   each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we had wanted
		       to encode these states in the call-site record directly.
		       Add a cleanup action to the chain to catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}
2372 
/* Append a call-site record for LANDING_PAD with action-table offset
   ACTION to SECTION's table, and return the index the record will
   carry in the output (biased by call_site_base).  */

static int
add_call_site (rtx landing_pad, int action, int section)
{
  call_site_record record;

  record = ggc_alloc_call_site_record_d ();
  record->landing_pad = landing_pad;
  record->action = action;

  VEC_safe_push (call_site_record, gc,
		 crtl->eh.call_site_record[section], record);

  return call_site_base + VEC_length (call_site_record,
				      crtl->eh.call_site_record[section]) - 1;
}
2388 
2389 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2390    The new note numbers will not refer to region numbers, but
2391    instead to call site entries.  */
2392 
static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  /* LAST_ACTION uses the same -3/-2/-1/0/positive encoding as
     collect_one_action_chain; -3 means "no action seen yet".  */
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;
  int cur_sec = 0;
  rtx section_switch_note = NULL_RTX;
  rtx first_no_action_insn_before_switch = NULL_RTX;
  rtx last_no_action_insn_before_switch = NULL_RTX;
  int saved_call_site_base = call_site_base;

  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  /* Walk the insn stream, grouping consecutive insns with the same
     (action, landing pad) pair into one call-site region delimited by
     NOTE_INSN_EH_REGION_BEG/END notes.  */
  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	eh_landing_pad lp;
	eh_region region;
	bool nothrow;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* For a SEQUENCE (delay slots), the EH info lives on the
	   first insn of the sequence.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
	if (nothrow)
	  continue;
	if (region)
	  this_action = collect_one_action_chain (ar_hash, region);
	else
	  this_action = -1;

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	if (this_action >= 0)
	  this_landing_pad = lp->landing_pad;
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If there is a queued no-action region in the other section
	       with hot/cold partitioning, emit it now.  */
	    if (first_no_action_insn_before_switch)
	      {
		gcc_assert (this_action != -1
			    && last_action == (first_no_action_insn
					       ? -1 : -3));
		call_site = add_call_site (NULL_RTX, 0, 0);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					 first_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		gcc_assert (last_action != -3
			    || (last_action_insn
				== last_no_action_insn_before_switch));
		first_no_action_insn_before_switch = NULL_RTX;
		last_no_action_insn_before_switch = NULL_RTX;
		call_site_base++;
	      }
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action,
					   cur_sec);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }
    else if (NOTE_P (iter)
	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
	gcc_assert (section_switch_note == NULL_RTX);
	gcc_assert (flag_reorder_blocks_and_partition);
	section_switch_note = iter;
	/* A pending no-action region must not straddle the section
	   boundary; queue it for emission in the first section.  */
	if (first_no_action_insn)
	  {
	    first_no_action_insn_before_switch = first_no_action_insn;
	    last_no_action_insn_before_switch = last_action_insn;
	    first_no_action_insn = NULL_RTX;
	    gcc_assert (last_action == -1);
	    last_action = -3;
	  }
	/* Force closing of current EH region before section switch and
	   opening a new one afterwards.  */
	else if (last_action != -3)
	  last_landing_pad = pc_rtx;
	call_site_base += VEC_length (call_site_record,
				      crtl->eh.call_site_record[cur_sec]);
	cur_sec++;
	gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
	crtl->eh.call_site_record[cur_sec]
	  = VEC_alloc (call_site_record, gc, 10);
      }

  /* Close a region left open at the end of the insn stream.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  htab_delete (ar_hash);
  return 0;
}
2552 
2553 static bool
2554 gate_convert_to_eh_region_ranges (void)
2555 {
2556   /* Nothing to do for SJLJ exceptions or if no regions created.  */
2557   if (cfun->eh->region_tree == NULL)
2558     return false;
2559   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2560     return false;
2561   return true;
2562 }
2563 
/* Pass descriptor for the "eh_ranges" RTL pass: gated off for SJLJ or
   EH-free functions, otherwise runs convert_to_eh_region_ranges.  */
struct rtl_opt_pass pass_convert_to_eh_region_ranges =
{
 {
  RTL_PASS,
  "eh_ranges",                          /* name */
  gate_convert_to_eh_region_ranges,	/* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0              			/* todo_flags_finish */
 }
};
2582 
2583 static void
2584 push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
2585 {
2586   do
2587     {
2588       unsigned char byte = value & 0x7f;
2589       value >>= 7;
2590       if (value)
2591 	byte |= 0x80;
2592       VEC_safe_push (uchar, gc, *data_area, byte);
2593     }
2594   while (value);
2595 }
2596 
/* Append VALUE to *DATA_AREA in signed LEB128 form.
   NOTE(review): relies on >> performing an arithmetic shift on
   negative VALUE, which is implementation-defined in C -- true for
   the compilers GCC supports building with, but worth confirming.  */

static void
push_sleb128 (VEC (uchar, gc) **data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      /* Stop once the remaining bits are pure sign extension and the
	 top data bit of the byte just produced agrees with the sign.  */
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VEC_safe_push (uchar, gc, *data_area, byte);
    }
  while (more);
}
2615 
2616 
2617 #ifndef HAVE_AS_LEB128
/* Return the size in bytes of SECTION's call-site table as emitted by
   dw2_output_call_site_table with the udata4 format: three 4-byte
   fields per record plus a uleb128 action offset.  */

static int
dw2_size_of_call_site_table (int section)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
2634 
2635 static int
2636 sjlj_size_of_call_site_table (void)
2637 {
2638   int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
2639   int size = 0;
2640   int i;
2641 
2642   for (i = 0; i < n; ++i)
2643     {
2644       struct call_site_record_d *cs =
2645 	VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2646       size += size_of_uleb128 (INTVAL (cs->landing_pad));
2647       size += size_of_uleb128 (cs->action);
2648     }
2649 
2650   return size;
2651 }
2652 #endif
2653 
/* Output SECTION's call-site table to the assembler stream, encoding
   the region start/length/landing-pad fields per CS_FORMAT (uleb128
   or udata4 deltas) followed by a uleb128 action offset.  */

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
  int i;
  const char *begin;

  /* All offsets are relative to the start of the containing section.  */
  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
2715 
2716 static void
2717 sjlj_output_call_site_table (void)
2718 {
2719   int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
2720   int i;
2721 
2722   for (i = 0; i < n; ++i)
2723     {
2724       struct call_site_record_d *cs =
2725 	VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
2726 
2727       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2728 				   "region %d landing pad", i);
2729       dw2_asm_output_data_uleb128 (cs->action, "action");
2730     }
2731 
2732   call_site_base += n;
2733 }
2734 
2735 /* Switch to the section that should be used for exception tables.  */
2736 
/* FNNAME is marked ARG_UNUSED because it is only referenced when
   HAVE_LD_EH_GC_SECTIONS is defined and -ffunction-sections is on.  */
static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
	  int flags;

	  /* The table can be read-only only if its type encodings do
	     not need runtime relocation under -fPIC.  */
	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

	  /* A per-function section lets the linker garbage-collect the
	     table together with its function; do not cache it.  */
#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections)
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, NULL);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
2784 
2785 
2786 /* Output a reference from an exception table to the type_info object TYPE.
2787    TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2788    the value.  */
2789 
static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  /* A NULL type is the catch-all entry; emit a zero.  */
  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct varpool_node *node;

      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      node = varpool_node (type);
	      if (node)
		varpool_mark_needed_node (node);
	      is_public = TREE_PUBLIC (type);
	    }
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  /* Otherwise emit it ourselves: absolute/aligned pointers go out as
     plain integers; anything else as a DWARF-encoded address.  */
  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
2841 
2842 static void
2843 output_one_function_exception_table (int section)
2844 {
2845   int tt_format, cs_format, lp_format, i;
2846 #ifdef HAVE_AS_LEB128
2847   char ttype_label[32];
2848   char cs_after_size_label[32];
2849   char cs_end_label[32];
2850 #else
2851   int call_site_len;
2852 #endif
2853   int have_tt_data;
2854   int tt_format_size = 0;
2855 
2856   have_tt_data = (VEC_length (tree, cfun->eh->ttype_data)
2857 		  || (targetm.arm_eabi_unwinder
2858 		      ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi)
2859 		      : VEC_length (uchar, cfun->eh->ehspec_data.other)));
2860 
2861   /* Indicate the format of the @TType entries.  */
2862   if (! have_tt_data)
2863     tt_format = DW_EH_PE_omit;
2864   else
2865     {
2866       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2867 #ifdef HAVE_AS_LEB128
2868       ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2869 				   section ? "LLSDATTC" : "LLSDATT",
2870 				   current_function_funcdef_no);
2871 #endif
2872       tt_format_size = size_of_encoded_value (tt_format);
2873 
2874       assemble_align (tt_format_size * BITS_PER_UNIT);
2875     }
2876 
2877   targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2878 				  current_function_funcdef_no);
2879 
2880   /* The LSDA header.  */
2881 
2882   /* Indicate the format of the landing pad start pointer.  An omitted
2883      field implies @LPStart == @Start.  */
2884   /* Currently we always put @LPStart == @Start.  This field would
2885      be most useful in moving the landing pads completely out of
2886      line to another section, but it could also be used to minimize
2887      the size of uleb128 landing pad offsets.  */
2888   lp_format = DW_EH_PE_omit;
2889   dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2890 		       eh_data_format_name (lp_format));
2891 
2892   /* @LPStart pointer would go here.  */
2893 
2894   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2895 		       eh_data_format_name (tt_format));
2896 
2897 #ifndef HAVE_AS_LEB128
2898   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2899     call_site_len = sjlj_size_of_call_site_table ();
2900   else
2901     call_site_len = dw2_size_of_call_site_table (section);
2902 #endif
2903 
2904   /* A pc-relative 4-byte displacement to the @TType data.  */
2905   if (have_tt_data)
2906     {
2907 #ifdef HAVE_AS_LEB128
2908       char ttype_after_disp_label[32];
2909       ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
2910 				   section ? "LLSDATTDC" : "LLSDATTD",
2911 				   current_function_funcdef_no);
2912       dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
2913 				    "@TType base offset");
2914       ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
2915 #else
2916       /* Ug.  Alignment queers things.  */
2917       unsigned int before_disp, after_disp, last_disp, disp;
2918 
2919       before_disp = 1 + 1;
2920       after_disp = (1 + size_of_uleb128 (call_site_len)
2921 		    + call_site_len
2922 		    + VEC_length (uchar, crtl->eh.action_record_data)
2923 		    + (VEC_length (tree, cfun->eh->ttype_data)
2924 		       * tt_format_size));
2925 
2926       disp = after_disp;
2927       do
2928 	{
2929 	  unsigned int disp_size, pad;
2930 
2931 	  last_disp = disp;
2932 	  disp_size = size_of_uleb128 (disp);
2933 	  pad = before_disp + disp_size + after_disp;
2934 	  if (pad % tt_format_size)
2935 	    pad = tt_format_size - (pad % tt_format_size);
2936 	  else
2937 	    pad = 0;
2938 	  disp = after_disp + pad;
2939 	}
2940       while (disp != last_disp);
2941 
2942       dw2_asm_output_data_uleb128 (disp, "@TType base offset");
2943 #endif
2944     }
2945 
2946   /* Indicate the format of the call-site offsets.  */
2947 #ifdef HAVE_AS_LEB128
2948   cs_format = DW_EH_PE_uleb128;
2949 #else
2950   cs_format = DW_EH_PE_udata4;
2951 #endif
2952   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
2953 		       eh_data_format_name (cs_format));
2954 
2955 #ifdef HAVE_AS_LEB128
2956   ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
2957 			       section ? "LLSDACSBC" : "LLSDACSB",
2958 			       current_function_funcdef_no);
2959   ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
2960 			       section ? "LLSDACSEC" : "LLSDACSE",
2961 			       current_function_funcdef_no);
2962   dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
2963 				"Call-site table length");
2964   ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
2965   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2966     sjlj_output_call_site_table ();
2967   else
2968     dw2_output_call_site_table (cs_format, section);
2969   ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
2970 #else
2971   dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
2972   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2973     sjlj_output_call_site_table ();
2974   else
2975     dw2_output_call_site_table (cs_format, section);
2976 #endif
2977 
2978   /* ??? Decode and interpret the data for flag_debug_asm.  */
2979   {
2980     uchar uc;
2981     FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc)
2982       dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
2983   }
2984 
2985   if (have_tt_data)
2986     assemble_align (tt_format_size * BITS_PER_UNIT);
2987 
2988   i = VEC_length (tree, cfun->eh->ttype_data);
2989   while (i-- > 0)
2990     {
2991       tree type = VEC_index (tree, cfun->eh->ttype_data, i);
2992       output_ttype (type, tt_format, tt_format_size);
2993     }
2994 
2995 #ifdef HAVE_AS_LEB128
2996   if (have_tt_data)
2997       ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
2998 #endif
2999 
3000   /* ??? Decode and interpret the data for flag_debug_asm.  */
3001   if (targetm.arm_eabi_unwinder)
3002     {
3003       tree type;
3004       for (i = 0;
3005 	   VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i)
3006 	output_ttype (type, tt_format, tt_format_size);
3007     }
3008   else
3009     {
3010       uchar uc;
3011       for (i = 0;
3012 	   VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i)
3013 	dw2_asm_output_data (1, uc,
3014 			     i ? NULL : "Exception specification table");
3015     }
3016 }
3017 
3018 void
3019 output_function_exception_table (const char *fnname)
3020 {
3021   rtx personality = get_personality_function (current_function_decl);
3022 
3023   /* Not all functions need anything.  */
3024   if (! crtl->uses_eh_lsda)
3025     return;
3026 
3027   if (personality)
3028     {
3029       assemble_external_libcall (personality);
3030 
3031       if (targetm.asm_out.emit_except_personality)
3032 	targetm.asm_out.emit_except_personality (personality);
3033     }
3034 
3035   switch_to_exception_section (fnname);
3036 
3037   /* If the target wants a label to begin the table, emit it here.  */
3038   targetm.asm_out.emit_except_table_label (asm_out_file);
3039 
3040   output_one_function_exception_table (0);
3041   if (crtl->eh.call_site_record[1] != NULL)
3042     output_one_function_exception_table (1);
3043 
3044   switch_to_section (current_function_section ());
3045 }
3046 
/* Record TABLE as the throw-statement hash table of function FUN,
   replacing any previous table.  */

void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}
3052 
/* Return the throw-statement hash table of function FUN, as last set
   by set_eh_throw_stmt_table.  */

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
3058 
3059 /* Determine if the function needs an EH personality function.  */
3060 
3061 enum eh_personality_kind
3062 function_needs_eh_personality (struct function *fn)
3063 {
3064   enum eh_personality_kind kind = eh_personality_none;
3065   eh_region i;
3066 
3067   FOR_ALL_EH_REGION_FN (i, fn)
3068     {
3069       switch (i->type)
3070 	{
3071 	case ERT_CLEANUP:
3072 	  /* Can do with any personality including the generic C one.  */
3073 	  kind = eh_personality_any;
3074 	  break;
3075 
3076 	case ERT_TRY:
3077 	case ERT_ALLOWED_EXCEPTIONS:
3078 	  /* Always needs a EH personality function.  The generic C
3079 	     personality doesn't handle these even for empty type lists.  */
3080 	  return eh_personality_lang;
3081 
3082 	case ERT_MUST_NOT_THROW:
3083 	  /* Always needs a EH personality function.  The language may specify
3084 	     what abort routine that must be used, e.g. std::terminate.  */
3085 	  return eh_personality_lang;
3086 	}
3087     }
3088 
3089   return kind;
3090 }
3091 
/* Dump the EH region tree of function FUN to OUT in human-readable
   form.  For each region we print its index and type name, its landing
   pads (as GIMPLE label trees or RTL insn UIDs, depending on the
   current IR), and the type-specific data (catch handlers for TRY
   regions, the filter and type list for ALLOWED_EXCEPTIONS).  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  /* Printable names, indexed by enum eh_region_type.  */
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  /* Walk the region tree iteratively in depth-first pre-order; DEPTH
     tracks nesting for indentation.  */
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->index, type_name[(int) i->type]);

      if (i->landing_pads)
	{
	  eh_landing_pad lp;

	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      /* Before expansion, print each pad's post-landing-pad
		 label as a tree expression.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad, 0);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      /* After expansion, print RTL insn UIDs; "(del)" marks a
		 label that has been turned into a note, i.e. deleted.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      /* Print the data specific to the region type.  */
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label, 0);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list, 0);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  Returning
	 from the root's outer (NULL) ends the walk.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
3208 
/* Dump the EH tree for FN on stderr.  Convenience wrapper around
   dump_eh_tree, intended to be called from the debugger.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
3216 
/* Verify invariants on EH datastructures of function FUN: that slot I
   of region_array/lp_array holds the region/landing pad with index I,
   that each region's OUTER link matches its position in the region
   tree, that each landing pad points back to its region, and that the
   walk of the tree visits exactly the entries present in the arrays.
   On any failure, dumps the tree to stderr and raises an internal
   error.  */

DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  /* Count the non-null entries of region_array, checking that slot I
     really holds the region with index I.  Slot 0 is skipped.  */
  count_r = 0;
  for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("region_array is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Likewise for lp_array and landing pad indices.  */
  count_lp = 0;
  for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("lp_array is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Walk the region tree in depth-first pre-order, checking each
     region and its landing pads against the arrays.  OUTER tracks the
     expected parent of R; DEPTH tracks nesting depth.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if (VEC_index (eh_region, fun->eh->region_array, r->index) != r)
	{
	  error ("region_array is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp)
	    {
	      error ("lp_array is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      /* Advance: descend into children first, then peers, then back up
	 the tree to the next unvisited peer.  */
      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  /* The counts match only if every array entry was reached by the
     tree walk above.  */
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}
3335 
3336 #include "gt-except.h"
3337