/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));
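
/* The three hooks above are filled in by the language front end.  As an
   illustrative sketch only, a hypothetical front end might install them
   during its initialization along the lines of

     lang_protect_cleanup_actions = my_protect_cleanup_actions;
     lang_eh_type_covers = my_eh_type_covers;
     lang_eh_runtime_type = my_eh_runtime_type;

   where the "my_*" functions are placeholders, not actual GCC symbols.  */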

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
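
/* As an illustrative sketch (not tied to any particular front end), a
   source construct such as

     try { f (); } catch (T) { handle (); }

   yields an ERT_TRY region for the try body with one ERT_CATCH region
   chained to it through u.try.catch and u.catch.next_catch, both linked
   into the surrounding region tree through the outer/inner/next_peer
   fields above.  */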

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};

static int t2r_eq				PARAMS ((const PTR,
							 const PTR));
static hashval_t t2r_hash			PARAMS ((const PTR));
static void add_type_for_runtime		PARAMS ((tree));
static tree lookup_type_for_runtime		PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter			PARAMS ((struct function *));

static void collect_eh_region_array		PARAMS ((void));
static void resolve_fixup_regions		PARAMS ((void));
static void remove_fixup_regions		PARAMS ((void));
static void remove_unreachable_regions		PARAMS ((rtx));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						     struct inline_remap *));
static void duplicate_eh_region_2		PARAMS ((struct eh_region *,
							 struct eh_region **));
static int ttypes_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ttypes_filter_hash		PARAMS ((const PTR));
static int ehspec_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ehspec_filter_hash		PARAMS ((const PTR));
static int add_ttypes_entry			PARAMS ((htab_t, tree));
static int add_ehspec_entry			PARAMS ((htab_t, htab_t,
							 tree));
static void assign_filter_values		PARAMS ((void));
static void build_post_landing_pads		PARAMS ((void));
static void connect_post_landing_pads		PARAMS ((void));
static void dw2_build_landing_pads		PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter		PARAMS ((rtx));
static void sjlj_emit_function_exit		PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads		PARAMS ((void));

static hashval_t ehl_hash			PARAMS ((const PTR));
static int ehl_eq				PARAMS ((const PTR,
							 const PTR));
static void add_ehl_entry			PARAMS ((rtx,
							 struct eh_region *));
static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler			PARAMS ((struct eh_region *));
static int for_each_eh_label_1			PARAMS ((PTR *, PTR));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled			PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t action_record_hash		PARAMS ((const PTR));
static int add_action_record			PARAMS ((htab_t, int, int));
static int collect_one_action_chain		PARAMS ((htab_t,
							 struct eh_region *));
static int add_call_site			PARAMS ((rtx, int));

static void push_uleb128			PARAMS ((varray_type *,
							 unsigned int));
static void push_sleb128			PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table		PARAMS ((void));
static int sjlj_size_of_call_site_table		PARAMS ((void));
#endif
static void dw2_output_call_site_table		PARAMS ((void));
static void sjlj_output_call_site_table		PARAMS ((void));

/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}


void
init_eh ()
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
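
/* For reference only: the layout constructed above corresponds roughly
   to the following C structure.  This is an illustrative sketch; the
   authoritative definition lives in unwind-sjlj.c and may differ in
   field types and naming.

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;
       int call_site;
       _Unwind_Word data[4];
       void *personality;
       void *lsda;
       void *jbuf[JBUF_LEN];
     };

   where JBUF_LEN stands for the target-dependent length computed
   above.  */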

void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *)
    ggc_alloc_cleared (sizeof (struct eh_status));
}

/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
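
/* Illustrative usage sketch (hypothetical front-end code, not part of
   this file): a cleanup that must run when an exception escapes a
   statement is expanded by bracketing that statement as

     expand_eh_region_start ();
     ... expand the protected statement ...
     expand_eh_region_end_cleanup (cleanup_expr);

   which creates one ERT_CLEANUP region in the tree and a matching
   NOTE_INSN_EH_REGION_BEG/NOTE_INSN_EH_REGION_END pair in the insn
   stream.  */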

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several exception
   types, which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
         processing, then register each type against the runtime types
         map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
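
/* Putting these together, a front end expanding

     try { body } catch (T1) { h1 } catch (T2) { h2 }

   would issue a call sequence along these lines (illustrative sketch
   only):

     expand_eh_region_start ();
     ... expand body ...
     expand_start_all_catch ();
     expand_start_catch (T1);  ... expand h1 ...  expand_end_catch ();
     expand_start_catch (T2);  ... expand h2 ...  expand_end_catch ();
     expand_end_all_catch ();
*/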

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw ()
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Provide random access to the exception region tree.  It's just as
   simple to collect the regions this way as in expand_eh_region_start,
   but without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
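
/* Illustrative sketch of the transformation performed above: a region
   delimited in the insn stream as

     NOTE_INSN_EH_REGION_BEG 3
       (call_insn ...)
     NOTE_INSN_EH_REGION_END 3

   becomes a bare call_insn carrying a REG_EH_REGION note with value 3,
   and the region notes themselves are deleted.  */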
1179c87b03e5Sespie 
1180c87b03e5Sespie static void
add_ehl_entry(label,region)1181c87b03e5Sespie add_ehl_entry (label, region)
1182c87b03e5Sespie      rtx label;
1183c87b03e5Sespie      struct eh_region *region;
1184c87b03e5Sespie {
1185c87b03e5Sespie   struct ehl_map_entry **slot, *entry;
1186c87b03e5Sespie 
1187c87b03e5Sespie   LABEL_PRESERVE_P (label) = 1;
1188c87b03e5Sespie 
1189c87b03e5Sespie   entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
1190c87b03e5Sespie   entry->label = label;
1191c87b03e5Sespie   entry->region = region;
1192c87b03e5Sespie 
1193c87b03e5Sespie   slot = (struct ehl_map_entry **)
1194c87b03e5Sespie     htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
1195c87b03e5Sespie 
1196c87b03e5Sespie   /* Before landing pad creation, each exception handler has its own
1197c87b03e5Sespie      label.  After landing pad creation, the exception handlers may
1198c87b03e5Sespie      share landing pads.  This is ok, since maybe_remove_eh_handler
1199c87b03e5Sespie      only requires the 1-1 mapping before landing pad creation.  */
1200c87b03e5Sespie   if (*slot && !cfun->eh->built_landing_pads)
1201c87b03e5Sespie     abort ();
1202c87b03e5Sespie 
1203c87b03e5Sespie   *slot = entry;
1204c87b03e5Sespie }
1205c87b03e5Sespie 
1206c87b03e5Sespie void
find_exception_handler_labels()1207c87b03e5Sespie find_exception_handler_labels ()
1208c87b03e5Sespie {
1209c87b03e5Sespie   int i;
1210c87b03e5Sespie 
1211c87b03e5Sespie   if (cfun->eh->exception_handler_label_map)
1212c87b03e5Sespie     htab_empty (cfun->eh->exception_handler_label_map);
1213c87b03e5Sespie   else
1214c87b03e5Sespie     {
1215c87b03e5Sespie       /* ??? The expansion factor here (3/2) must be greater than the htab
1216c87b03e5Sespie 	 occupancy factor (4/3) to avoid unnecessary resizing.  */
1217c87b03e5Sespie       cfun->eh->exception_handler_label_map
1218c87b03e5Sespie         = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
1219c87b03e5Sespie 			   ehl_hash, ehl_eq, NULL);
1220c87b03e5Sespie     }
1221c87b03e5Sespie 
1222c87b03e5Sespie   if (cfun->eh->region_tree == NULL)
1223c87b03e5Sespie     return;
1224c87b03e5Sespie 
1225c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1226c87b03e5Sespie     {
1227c87b03e5Sespie       struct eh_region *region = cfun->eh->region_array[i];
1228c87b03e5Sespie       rtx lab;
1229c87b03e5Sespie 
1230c87b03e5Sespie       if (! region || region->region_number != i)
1231c87b03e5Sespie 	continue;
1232c87b03e5Sespie       if (cfun->eh->built_landing_pads)
1233c87b03e5Sespie 	lab = region->landing_pad;
1234c87b03e5Sespie       else
1235c87b03e5Sespie 	lab = region->label;
1236c87b03e5Sespie 
1237c87b03e5Sespie       if (lab)
1238c87b03e5Sespie 	add_ehl_entry (lab, region);
1239c87b03e5Sespie     }
1240c87b03e5Sespie 
1241c87b03e5Sespie   /* For sjlj exceptions, need the return label to remain live until
1242c87b03e5Sespie      after landing pad generation.  */
1243c87b03e5Sespie   if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1244c87b03e5Sespie     add_ehl_entry (return_label, NULL);
1245c87b03e5Sespie }
1246c87b03e5Sespie 
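/* Return true if the current function has any EH region other than a
   pure ERT_THROW marker, i.e. anything that may actually need handler
   code of its own.  */
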
1247c87b03e5Sespie bool
1248c87b03e5Sespie current_function_has_exception_handlers ()
1249c87b03e5Sespie {
1250c87b03e5Sespie   int i;
1251c87b03e5Sespie 
1252c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1253c87b03e5Sespie     {
1254c87b03e5Sespie       struct eh_region *region = cfun->eh->region_array[i];
1255c87b03e5Sespie 
1256c87b03e5Sespie       if (! region || region->region_number != i)
1257c87b03e5Sespie 	continue;
1258c87b03e5Sespie       if (region->type != ERT_THROW)
1259c87b03e5Sespie 	return true;
1260c87b03e5Sespie     }
1261c87b03e5Sespie 
1262c87b03e5Sespie   return false;
1263c87b03e5Sespie }
1264c87b03e5Sespie 
1265c87b03e5Sespie static struct eh_region *
1266c87b03e5Sespie duplicate_eh_region_1 (o, map)
1267c87b03e5Sespie      struct eh_region *o;
1268c87b03e5Sespie      struct inline_remap *map;
1269c87b03e5Sespie {
1270c87b03e5Sespie   struct eh_region *n
1271c87b03e5Sespie     = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));
1272c87b03e5Sespie 
1273c87b03e5Sespie   n->region_number = o->region_number + cfun->eh->last_region_number;
1274c87b03e5Sespie   n->type = o->type;
1275c87b03e5Sespie 
1276c87b03e5Sespie   switch (n->type)
1277c87b03e5Sespie     {
1278c87b03e5Sespie     case ERT_CLEANUP:
1279c87b03e5Sespie     case ERT_MUST_NOT_THROW:
1280c87b03e5Sespie       break;
1281c87b03e5Sespie 
1282c87b03e5Sespie     case ERT_TRY:
1283c87b03e5Sespie       if (o->u.try.continue_label)
1284c87b03e5Sespie 	n->u.try.continue_label
1285c87b03e5Sespie 	  = get_label_from_map (map,
1286c87b03e5Sespie 				CODE_LABEL_NUMBER (o->u.try.continue_label));
1287c87b03e5Sespie       break;
1288c87b03e5Sespie 
1289c87b03e5Sespie     case ERT_CATCH:
1290c87b03e5Sespie       n->u.catch.type_list = o->u.catch.type_list;
1291c87b03e5Sespie       break;
1292c87b03e5Sespie 
1293c87b03e5Sespie     case ERT_ALLOWED_EXCEPTIONS:
1294c87b03e5Sespie       n->u.allowed.type_list = o->u.allowed.type_list;
1295c87b03e5Sespie       break;
1296c87b03e5Sespie 
1297c87b03e5Sespie     case ERT_THROW:
1298c87b03e5Sespie       n->u.throw.type = o->u.throw.type;
1299c87b03e5Sespie       break;
1299c87b03e5Sespie 
1300c87b03e5Sespie     default:
1301c87b03e5Sespie       abort ();
1302c87b03e5Sespie     }
1303c87b03e5Sespie 
1304c87b03e5Sespie   if (o->label)
1305c87b03e5Sespie     n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1306c87b03e5Sespie   if (o->resume)
1307c87b03e5Sespie     {
1308c87b03e5Sespie       n->resume = map->insn_map[INSN_UID (o->resume)];
1309c87b03e5Sespie       if (n->resume == NULL)
1310c87b03e5Sespie 	abort ();
1311c87b03e5Sespie     }
1312c87b03e5Sespie 
1313c87b03e5Sespie   return n;
1314c87b03e5Sespie }
1315c87b03e5Sespie 
1316c87b03e5Sespie static void
1317c87b03e5Sespie duplicate_eh_region_2 (o, n_array)
1318c87b03e5Sespie      struct eh_region *o;
1319c87b03e5Sespie      struct eh_region **n_array;
1320c87b03e5Sespie {
1321c87b03e5Sespie   struct eh_region *n = n_array[o->region_number];
1322c87b03e5Sespie 
1323c87b03e5Sespie   switch (n->type)
1324c87b03e5Sespie     {
1325c87b03e5Sespie     case ERT_TRY:
1326c87b03e5Sespie       n->u.try.catch = n_array[o->u.try.catch->region_number];
1327c87b03e5Sespie       n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1328c87b03e5Sespie       break;
1329c87b03e5Sespie 
1330c87b03e5Sespie     case ERT_CATCH:
1331c87b03e5Sespie       if (o->u.catch.next_catch)
1332c87b03e5Sespie 	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1333c87b03e5Sespie       if (o->u.catch.prev_catch)
1334c87b03e5Sespie 	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1335c87b03e5Sespie       break;
1336c87b03e5Sespie 
1337c87b03e5Sespie     default:
1338c87b03e5Sespie       break;
1339c87b03e5Sespie     }
1340c87b03e5Sespie 
1341c87b03e5Sespie   if (o->outer)
1342c87b03e5Sespie     n->outer = n_array[o->outer->region_number];
1343c87b03e5Sespie   if (o->inner)
1344c87b03e5Sespie     n->inner = n_array[o->inner->region_number];
1345c87b03e5Sespie   if (o->next_peer)
1346c87b03e5Sespie     n->next_peer = n_array[o->next_peer->region_number];
1347c87b03e5Sespie }
1348c87b03e5Sespie 
1349c87b03e5Sespie int
1350c87b03e5Sespie duplicate_eh_regions (ifun, map)
1351c87b03e5Sespie      struct function *ifun;
1352c87b03e5Sespie      struct inline_remap *map;
1353c87b03e5Sespie {
1354c87b03e5Sespie   int ifun_last_region_number = ifun->eh->last_region_number;
1355c87b03e5Sespie   struct eh_region **n_array, *root, *cur;
1356c87b03e5Sespie   int i;
1357c87b03e5Sespie 
1358c87b03e5Sespie   if (ifun_last_region_number == 0)
1359c87b03e5Sespie     return 0;
1360c87b03e5Sespie 
1361c87b03e5Sespie   n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1362c87b03e5Sespie 
1363c87b03e5Sespie   for (i = 1; i <= ifun_last_region_number; ++i)
1364c87b03e5Sespie     {
1365c87b03e5Sespie       cur = ifun->eh->region_array[i];
1366c87b03e5Sespie       if (!cur || cur->region_number != i)
1367c87b03e5Sespie 	continue;
1368c87b03e5Sespie       n_array[i] = duplicate_eh_region_1 (cur, map);
1369c87b03e5Sespie     }
1370c87b03e5Sespie   for (i = 1; i <= ifun_last_region_number; ++i)
1371c87b03e5Sespie     {
1372c87b03e5Sespie       cur = ifun->eh->region_array[i];
1373c87b03e5Sespie       if (!cur || cur->region_number != i)
1374c87b03e5Sespie 	continue;
1375c87b03e5Sespie       duplicate_eh_region_2 (cur, n_array);
1376c87b03e5Sespie     }
1377c87b03e5Sespie 
1378c87b03e5Sespie   root = n_array[ifun->eh->region_tree->region_number];
1379c87b03e5Sespie   cur = cfun->eh->cur_region;
1380c87b03e5Sespie   if (cur)
1381c87b03e5Sespie     {
1382c87b03e5Sespie       struct eh_region *p = cur->inner;
1383c87b03e5Sespie       if (p)
1384c87b03e5Sespie 	{
1385c87b03e5Sespie 	  while (p->next_peer)
1386c87b03e5Sespie 	    p = p->next_peer;
1387c87b03e5Sespie 	  p->next_peer = root;
1388c87b03e5Sespie 	}
1389c87b03e5Sespie       else
1390c87b03e5Sespie 	cur->inner = root;
1391c87b03e5Sespie 
1392c87b03e5Sespie       for (i = 1; i <= ifun_last_region_number; ++i)
1393c87b03e5Sespie 	if (n_array[i] && n_array[i]->outer == NULL)
1394c87b03e5Sespie 	  n_array[i]->outer = cur;
1395c87b03e5Sespie     }
1396c87b03e5Sespie   else
1397c87b03e5Sespie     {
1398c87b03e5Sespie       struct eh_region *p = cfun->eh->region_tree;
1399c87b03e5Sespie       if (p)
1400c87b03e5Sespie 	{
1401c87b03e5Sespie 	  while (p->next_peer)
1402c87b03e5Sespie 	    p = p->next_peer;
1403c87b03e5Sespie 	  p->next_peer = root;
1404c87b03e5Sespie 	}
1405c87b03e5Sespie       else
1406c87b03e5Sespie 	cfun->eh->region_tree = root;
1407c87b03e5Sespie     }
1408c87b03e5Sespie 
1409c87b03e5Sespie   free (n_array);
1410c87b03e5Sespie 
1411c87b03e5Sespie   i = cfun->eh->last_region_number;
1412c87b03e5Sespie   cfun->eh->last_region_number = i + ifun_last_region_number;
1413c87b03e5Sespie   return i;
1414c87b03e5Sespie }
1415c87b03e5Sespie 
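/* duplicate_eh_region_1 and duplicate_eh_region_2 above follow the usual
   two-pass copy of a linked structure: the first pass clones every node
   and records the clone in a map indexed by the old node's number, the
   second pass rewrites each clone's pointers through that map.  Below is
   a stand-alone sketch of the same idea on a made-up singly linked node
   type; it is illustration only and is not used by the compiler.  */
#if 0
struct example_node
{
  int number;				/* Dense 0..n-1 numbering.  */
  struct example_node *next;
};

static struct example_node *
example_copy_chain (old_head, map)
     struct example_node *old_head;
     struct example_node **map;		/* Indexed by node number.  */
{
  struct example_node *o;

  /* Pass 1: allocate a clone for every old node.  */
  for (o = old_head; o; o = o->next)
    {
      struct example_node *n
	= (struct example_node *) xmalloc (sizeof (*n));
      n->number = o->number;
      n->next = NULL;
      map[o->number] = n;
    }

  /* Pass 2: translate the inter-node pointers through the map.  */
  for (o = old_head; o; o = o->next)
    if (o->next)
      map[o->number]->next = map[o->next->number];

  return old_head ? map[old_head->number] : NULL;
}
#endif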
1416c87b03e5Sespie 
1417c87b03e5Sespie static int
1418c87b03e5Sespie t2r_eq (pentry, pdata)
1419c87b03e5Sespie      const PTR pentry;
1420c87b03e5Sespie      const PTR pdata;
1421c87b03e5Sespie {
1422c87b03e5Sespie   tree entry = (tree) pentry;
1423c87b03e5Sespie   tree data = (tree) pdata;
1424c87b03e5Sespie 
1425c87b03e5Sespie   return TREE_PURPOSE (entry) == data;
1426c87b03e5Sespie }
1427c87b03e5Sespie 
1428c87b03e5Sespie static hashval_t
1429c87b03e5Sespie t2r_hash (pentry)
1430c87b03e5Sespie      const PTR pentry;
1431c87b03e5Sespie {
1432c87b03e5Sespie   tree entry = (tree) pentry;
1433c87b03e5Sespie   return TYPE_HASH (TREE_PURPOSE (entry));
1434c87b03e5Sespie }
1435c87b03e5Sespie 
1436c87b03e5Sespie static void
1437c87b03e5Sespie add_type_for_runtime (type)
1438c87b03e5Sespie      tree type;
1439c87b03e5Sespie {
1440c87b03e5Sespie   tree *slot;
1441c87b03e5Sespie 
1442c87b03e5Sespie   slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1443c87b03e5Sespie 					    TYPE_HASH (type), INSERT);
1444c87b03e5Sespie   if (*slot == NULL)
1445c87b03e5Sespie     {
1446c87b03e5Sespie       tree runtime = (*lang_eh_runtime_type) (type);
1447c87b03e5Sespie       *slot = tree_cons (type, runtime, NULL_TREE);
1448c87b03e5Sespie     }
1449c87b03e5Sespie }
1450c87b03e5Sespie 
1451c87b03e5Sespie static tree
1452c87b03e5Sespie lookup_type_for_runtime (type)
1453c87b03e5Sespie      tree type;
1454c87b03e5Sespie {
1455c87b03e5Sespie   tree *slot;
1456c87b03e5Sespie 
1457c87b03e5Sespie   slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1458c87b03e5Sespie 					    TYPE_HASH (type), NO_INSERT);
1459c87b03e5Sespie 
1460c87b03e5Sespie   /* We should have always inserted the data earlier.  */
1461c87b03e5Sespie   return TREE_VALUE (*slot);
1462c87b03e5Sespie }
1463c87b03e5Sespie 
1464c87b03e5Sespie 
1465c87b03e5Sespie /* Represent an entry in @TTypes for either catch actions
1466c87b03e5Sespie    or exception filter actions.  */
1467c87b03e5Sespie struct ttypes_filter GTY(())
1468c87b03e5Sespie {
1469c87b03e5Sespie   tree t;
1470c87b03e5Sespie   int filter;
1471c87b03e5Sespie };
1472c87b03e5Sespie 
1473c87b03e5Sespie /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1474c87b03e5Sespie    (a tree) for a @TTypes type node we are thinking about adding.  */
1475c87b03e5Sespie 
1476c87b03e5Sespie static int
1477c87b03e5Sespie ttypes_filter_eq (pentry, pdata)
1478c87b03e5Sespie      const PTR pentry;
1479c87b03e5Sespie      const PTR pdata;
1480c87b03e5Sespie {
1481c87b03e5Sespie   const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1482c87b03e5Sespie   tree data = (tree) pdata;
1483c87b03e5Sespie 
1484c87b03e5Sespie   return entry->t == data;
1485c87b03e5Sespie }
1486c87b03e5Sespie 
1487c87b03e5Sespie static hashval_t
1488c87b03e5Sespie ttypes_filter_hash (pentry)
1489c87b03e5Sespie      const PTR pentry;
1490c87b03e5Sespie {
1491c87b03e5Sespie   const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1492c87b03e5Sespie   return TYPE_HASH (entry->t);
1493c87b03e5Sespie }
1494c87b03e5Sespie 
1495c87b03e5Sespie /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1496c87b03e5Sespie    exception specification list we are thinking about adding.  */
1497c87b03e5Sespie /* ??? Currently we use the type lists in the order given.  Someone
1498c87b03e5Sespie    should put these in some canonical order.  */
1499c87b03e5Sespie 
1500c87b03e5Sespie static int
1501c87b03e5Sespie ehspec_filter_eq (pentry, pdata)
1502c87b03e5Sespie      const PTR pentry;
1503c87b03e5Sespie      const PTR pdata;
1504c87b03e5Sespie {
1505c87b03e5Sespie   const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1506c87b03e5Sespie   const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1507c87b03e5Sespie 
1508c87b03e5Sespie   return type_list_equal (entry->t, data->t);
1509c87b03e5Sespie }
1510c87b03e5Sespie 
1511c87b03e5Sespie /* Hash function for exception specification lists.  */
1512c87b03e5Sespie 
1513c87b03e5Sespie static hashval_t
1514c87b03e5Sespie ehspec_filter_hash (pentry)
1515c87b03e5Sespie      const PTR pentry;
1516c87b03e5Sespie {
1517c87b03e5Sespie   const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1518c87b03e5Sespie   hashval_t h = 0;
1519c87b03e5Sespie   tree list;
1520c87b03e5Sespie 
1521c87b03e5Sespie   for (list = entry->t; list ; list = TREE_CHAIN (list))
1522c87b03e5Sespie     h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1523c87b03e5Sespie   return h;
1524c87b03e5Sespie }
1525c87b03e5Sespie 
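/* The loop above combines the per-type hashes with a shift-and-add mix:
   with a 32-bit hashval_t, (h << 5) + (h >> 27) moves the top five bits
   of the accumulator to the bottom, so each step behaves roughly like a
   5-bit rotate followed by an add.  The same mix over a plain array, as
   a stand-alone sketch (names invented for the illustration):  */
#if 0
static unsigned int
example_sequence_hash (values, count)
     const unsigned int *values;
     int count;
{
  unsigned int h = 0;
  int i;

  for (i = 0; i < count; ++i)
    h = (h << 5) + (h >> 27) + values[i];
  return h;
}
#endif
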
1526c87b03e5Sespie /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1527c87b03e5Sespie    up the search.  Return the filter value to be used.  */
1528c87b03e5Sespie 
1529c87b03e5Sespie static int
1530c87b03e5Sespie add_ttypes_entry (ttypes_hash, type)
1531c87b03e5Sespie      htab_t ttypes_hash;
1532c87b03e5Sespie      tree type;
1533c87b03e5Sespie {
1534c87b03e5Sespie   struct ttypes_filter **slot, *n;
1535c87b03e5Sespie 
1536c87b03e5Sespie   slot = (struct ttypes_filter **)
1537c87b03e5Sespie     htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1538c87b03e5Sespie 
1539c87b03e5Sespie   if ((n = *slot) == NULL)
1540c87b03e5Sespie     {
1541c87b03e5Sespie       /* Filter value is a 1 based table index.  */
1542c87b03e5Sespie 
1543c87b03e5Sespie       n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1544c87b03e5Sespie       n->t = type;
1545c87b03e5Sespie       n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1546c87b03e5Sespie       *slot = n;
1547c87b03e5Sespie 
1548c87b03e5Sespie       VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1549c87b03e5Sespie     }
1550c87b03e5Sespie 
1551c87b03e5Sespie   return n->filter;
1552c87b03e5Sespie }
1553c87b03e5Sespie 
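/* Stripped of the hash table, add_ttypes_entry is interning: each
   distinct type is appended to cfun->eh->ttype_data once and is from
   then on identified by its stable 1-based position, which is the
   filter value the runtime tests against.  A minimal stand-alone sketch
   using a linear scan instead of a hash table (illustration only; the
   names are made up):  */
#if 0
static int
example_intern (table, n_entries, value)
     void **table;		/* At least one slot per distinct value.  */
     int *n_entries;
     void *value;
{
  int i;

  for (i = 0; i < *n_entries; ++i)
    if (table[i] == value)
      return i + 1;		/* Filter values are 1-based.  */

  table[(*n_entries)++] = value;
  return *n_entries;
}
#endif
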
1554c87b03e5Sespie /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1555c87b03e5Sespie    to speed up the search.  Return the filter value to be used.  */
1556c87b03e5Sespie 
1557c87b03e5Sespie static int
1558c87b03e5Sespie add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1559c87b03e5Sespie      htab_t ehspec_hash;
1560c87b03e5Sespie      htab_t ttypes_hash;
1561c87b03e5Sespie      tree list;
1562c87b03e5Sespie {
1563c87b03e5Sespie   struct ttypes_filter **slot, *n;
1564c87b03e5Sespie   struct ttypes_filter dummy;
1565c87b03e5Sespie 
1566c87b03e5Sespie   dummy.t = list;
1567c87b03e5Sespie   slot = (struct ttypes_filter **)
1568c87b03e5Sespie     htab_find_slot (ehspec_hash, &dummy, INSERT);
1569c87b03e5Sespie 
1570c87b03e5Sespie   if ((n = *slot) == NULL)
1571c87b03e5Sespie     {
1572c87b03e5Sespie       /* Filter value is a -1 based byte index into a uleb128 buffer.  */
1573c87b03e5Sespie 
1574c87b03e5Sespie       n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1575c87b03e5Sespie       n->t = list;
1576c87b03e5Sespie       n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1577c87b03e5Sespie       *slot = n;
1578c87b03e5Sespie 
1579c87b03e5Sespie       /* Look up each type in the list and encode its filter
1580c87b03e5Sespie 	 value as a uleb128.  Terminate the list with 0.  */
1581c87b03e5Sespie       for (; list ; list = TREE_CHAIN (list))
1582c87b03e5Sespie 	push_uleb128 (&cfun->eh->ehspec_data,
1583c87b03e5Sespie 		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1584c87b03e5Sespie       VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1585c87b03e5Sespie     }
1586c87b03e5Sespie 
1587c87b03e5Sespie   return n->filter;
1588c87b03e5Sespie }
1589c87b03e5Sespie 
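/* The exception specification data built above is a sequence of filter
   values in uleb128 form: little-endian groups of seven bits, with the
   high bit of a byte set whenever more bytes follow.  A stand-alone
   encoder for that format, as a sketch (this is not the push_uleb128
   used above, which appends to a varray):  */
#if 0
static int
example_encode_uleb128 (buf, value)
     unsigned char *buf;
     unsigned int value;
{
  int len = 0;

  do
    {
      unsigned char byte = value & 0x7f;

      value >>= 7;
      if (value != 0)
	byte |= 0x80;		/* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);

  return len;			/* Number of bytes written.  */
}
#endif
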
1590c87b03e5Sespie /* Generate the action filter values to be used for CATCH and
1591c87b03e5Sespie    ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
1592c87b03e5Sespie    we use lots of landing pads, and so every type or list can share
1593c87b03e5Sespie    the same filter value, which saves table space.  */
1594c87b03e5Sespie 
1595c87b03e5Sespie static void
1596c87b03e5Sespie assign_filter_values ()
1597c87b03e5Sespie {
1598c87b03e5Sespie   int i;
1599c87b03e5Sespie   htab_t ttypes, ehspec;
1600c87b03e5Sespie 
1601c87b03e5Sespie   VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1602c87b03e5Sespie   VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1603c87b03e5Sespie 
1604c87b03e5Sespie   ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1605c87b03e5Sespie   ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1606c87b03e5Sespie 
1607c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1608c87b03e5Sespie     {
1609c87b03e5Sespie       struct eh_region *r = cfun->eh->region_array[i];
1610c87b03e5Sespie 
1611c87b03e5Sespie       /* Mind we don't process a region more than once.  */
1612c87b03e5Sespie       if (!r || r->region_number != i)
1613c87b03e5Sespie 	continue;
1614c87b03e5Sespie 
1615c87b03e5Sespie       switch (r->type)
1616c87b03e5Sespie 	{
1617c87b03e5Sespie 	case ERT_CATCH:
1618c87b03e5Sespie 	  /* Whatever type_list is (NULL or true list), we build a list
1619c87b03e5Sespie 	     of filters for the region.  */
1620c87b03e5Sespie 	  r->u.catch.filter_list = NULL_TREE;
1621c87b03e5Sespie 
1622c87b03e5Sespie 	  if (r->u.catch.type_list != NULL)
1623c87b03e5Sespie 	    {
1624c87b03e5Sespie 	      /* Get a filter value for each of the types caught and store
1625c87b03e5Sespie 		 them in the region's dedicated list.  */
1626c87b03e5Sespie 	      tree tp_node = r->u.catch.type_list;
1627c87b03e5Sespie 
1628c87b03e5Sespie 	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1629c87b03e5Sespie 		{
1630c87b03e5Sespie 		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1631c87b03e5Sespie 		  tree flt_node = build_int_2 (flt, 0);
1632c87b03e5Sespie 
1633c87b03e5Sespie 		  r->u.catch.filter_list
1634c87b03e5Sespie 		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1635c87b03e5Sespie 		}
1636c87b03e5Sespie 	    }
1637c87b03e5Sespie 	  else
1638c87b03e5Sespie 	    {
1639c87b03e5Sespie 	      /* Get a filter value for the NULL list also since it will need
1640c87b03e5Sespie 		 an action record anyway.  */
1641c87b03e5Sespie 	      int flt = add_ttypes_entry (ttypes, NULL);
1642c87b03e5Sespie 	      tree flt_node = build_int_2 (flt, 0);
1643c87b03e5Sespie 
1644c87b03e5Sespie 	      r->u.catch.filter_list
1645c87b03e5Sespie 		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1646c87b03e5Sespie 	    }
1647c87b03e5Sespie 
1648c87b03e5Sespie 	  break;
1649c87b03e5Sespie 
1650c87b03e5Sespie 	case ERT_ALLOWED_EXCEPTIONS:
1651c87b03e5Sespie 	  r->u.allowed.filter
1652c87b03e5Sespie 	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1653c87b03e5Sespie 	  break;
1654c87b03e5Sespie 
1655c87b03e5Sespie 	default:
1656c87b03e5Sespie 	  break;
1657c87b03e5Sespie 	}
1658c87b03e5Sespie     }
1659c87b03e5Sespie 
1660c87b03e5Sespie   htab_delete (ttypes);
1661c87b03e5Sespie   htab_delete (ehspec);
1662c87b03e5Sespie }
1663c87b03e5Sespie 
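/* Emit the code that runs once an exception has been delivered to a
   region: for a TRY region, the post-landing-pad label followed by a
   compare-and-branch chain that dispatches on the filter value to the
   matching catch handler; for an ALLOWED_EXCEPTIONS region, a single
   test against its filter.  Falling out of either chain leaves a RESX
   marker standing in for the eventual call to _Unwind_Resume.  Cleanup
   and must-not-throw regions simply reuse their existing label.  */
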
1664c87b03e5Sespie static void
1665c87b03e5Sespie build_post_landing_pads ()
1666c87b03e5Sespie {
1667c87b03e5Sespie   int i;
1668c87b03e5Sespie 
1669c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1670c87b03e5Sespie     {
1671c87b03e5Sespie       struct eh_region *region = cfun->eh->region_array[i];
1672c87b03e5Sespie       rtx seq;
1673c87b03e5Sespie 
1674c87b03e5Sespie       /* Mind we don't process a region more than once.  */
1675c87b03e5Sespie       if (!region || region->region_number != i)
1676c87b03e5Sespie 	continue;
1677c87b03e5Sespie 
1678c87b03e5Sespie       switch (region->type)
1679c87b03e5Sespie 	{
1680c87b03e5Sespie 	case ERT_TRY:
1681c87b03e5Sespie 	  /* ??? Collect the set of all non-overlapping catch handlers
1682c87b03e5Sespie 	       all the way up the chain until blocked by a cleanup.  */
1683c87b03e5Sespie 	  /* ??? Outer try regions can share landing pads with inner
1684c87b03e5Sespie 	     try regions if the types are completely non-overlapping,
1685c87b03e5Sespie 	     and there are no intervening cleanups.  */
1686c87b03e5Sespie 
1687c87b03e5Sespie 	  region->post_landing_pad = gen_label_rtx ();
1688c87b03e5Sespie 
1689c87b03e5Sespie 	  start_sequence ();
1690c87b03e5Sespie 
1691c87b03e5Sespie 	  emit_label (region->post_landing_pad);
1692c87b03e5Sespie 
1693c87b03e5Sespie 	  /* ??? It is mighty inconvenient to call back into the
1694c87b03e5Sespie 	     switch statement generation code in expand_end_case.
1695c87b03e5Sespie 	     Rapid prototyping sez a sequence of ifs.  */
1696c87b03e5Sespie 	  {
1697c87b03e5Sespie 	    struct eh_region *c;
1698c87b03e5Sespie 	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1699c87b03e5Sespie 	      {
1700c87b03e5Sespie 		if (c->u.catch.type_list == NULL)
1701c87b03e5Sespie 		  emit_jump (c->label);
1702c87b03e5Sespie 		else
1703c87b03e5Sespie 		  {
1704c87b03e5Sespie 		    /* We need one cmp/jump per type caught.  Each type
1705c87b03e5Sespie 		       list entry has a matching entry in the filter list
1706c87b03e5Sespie 		       (see assign_filter_values).  */
1707c87b03e5Sespie 		    tree tp_node = c->u.catch.type_list;
1708c87b03e5Sespie 		    tree flt_node = c->u.catch.filter_list;
1709c87b03e5Sespie 
1710c87b03e5Sespie 		    for (; tp_node; )
1711c87b03e5Sespie 		      {
1712c87b03e5Sespie 			emit_cmp_and_jump_insns
1713c87b03e5Sespie 			  (cfun->eh->filter,
1714c87b03e5Sespie 			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1715c87b03e5Sespie 			   EQ, NULL_RTX, word_mode, 0, c->label);
1716c87b03e5Sespie 
1717c87b03e5Sespie 			tp_node = TREE_CHAIN (tp_node);
1718c87b03e5Sespie 			flt_node = TREE_CHAIN (flt_node);
1719c87b03e5Sespie 		      }
1720c87b03e5Sespie 		  }
1721c87b03e5Sespie 	      }
1722c87b03e5Sespie 	  }
1723c87b03e5Sespie 
1724c87b03e5Sespie 	  /* We delay the generation of the _Unwind_Resume until we generate
1725c87b03e5Sespie 	     landing pads.  We emit a marker here so as to get good control
1726c87b03e5Sespie 	     flow data in the meantime.  */
1727c87b03e5Sespie 	  region->resume
1728c87b03e5Sespie 	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1729c87b03e5Sespie 	  emit_barrier ();
1730c87b03e5Sespie 
1731c87b03e5Sespie 	  seq = get_insns ();
1732c87b03e5Sespie 	  end_sequence ();
1733c87b03e5Sespie 
1734c87b03e5Sespie 	  emit_insn_before (seq, region->u.try.catch->label);
1735c87b03e5Sespie 	  break;
1736c87b03e5Sespie 
1737c87b03e5Sespie 	case ERT_ALLOWED_EXCEPTIONS:
1738c87b03e5Sespie 	  region->post_landing_pad = gen_label_rtx ();
1739c87b03e5Sespie 
1740c87b03e5Sespie 	  start_sequence ();
1741c87b03e5Sespie 
1742c87b03e5Sespie 	  emit_label (region->post_landing_pad);
1743c87b03e5Sespie 
1744c87b03e5Sespie 	  emit_cmp_and_jump_insns (cfun->eh->filter,
1745c87b03e5Sespie 				   GEN_INT (region->u.allowed.filter),
1746c87b03e5Sespie 				   EQ, NULL_RTX, word_mode, 0, region->label);
1747c87b03e5Sespie 
1748c87b03e5Sespie 	  /* We delay the generation of the _Unwind_Resume until we generate
1749c87b03e5Sespie 	     landing pads.  We emit a marker here so as to get good control
1750c87b03e5Sespie 	     flow data in the meantime.  */
1751c87b03e5Sespie 	  region->resume
1752c87b03e5Sespie 	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1753c87b03e5Sespie 	  emit_barrier ();
1754c87b03e5Sespie 
1755c87b03e5Sespie 	  seq = get_insns ();
1756c87b03e5Sespie 	  end_sequence ();
1757c87b03e5Sespie 
1758c87b03e5Sespie 	  emit_insn_before (seq, region->label);
1759c87b03e5Sespie 	  break;
1760c87b03e5Sespie 
1761c87b03e5Sespie 	case ERT_CLEANUP:
1762c87b03e5Sespie 	case ERT_MUST_NOT_THROW:
1763c87b03e5Sespie 	  region->post_landing_pad = region->label;
1764c87b03e5Sespie 	  break;
1765c87b03e5Sespie 
1766c87b03e5Sespie 	case ERT_CATCH:
1767c87b03e5Sespie 	case ERT_THROW:
1768c87b03e5Sespie 	  /* Nothing to do.  */
1769c87b03e5Sespie 	  break;
1770c87b03e5Sespie 
1771c87b03e5Sespie 	default:
1772c87b03e5Sespie 	  abort ();
1773c87b03e5Sespie 	}
1774c87b03e5Sespie     }
1775c87b03e5Sespie }
1776c87b03e5Sespie 
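/* At the source level, the dispatch emitted above for a TRY region is
   just a chain of ifs over the filter value delivered by the unwinder
   (pseudo-code; the names are placeholders):

	if (filter == FILTER_OF_HANDLER_1) goto handler_1;
	if (filter == FILTER_OF_HANDLER_2) goto handler_2;
	...
	resume unwinding in the enclosing region;

   A catch-all handler (one with a NULL type list) becomes an
   unconditional jump, cutting the chain short.  */
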
1777c87b03e5Sespie /* Replace RESX patterns with jumps to the next handler if any, or calls to
1778c87b03e5Sespie    _Unwind_Resume otherwise.  */
1779c87b03e5Sespie 
1780c87b03e5Sespie static void
1781c87b03e5Sespie connect_post_landing_pads ()
1782c87b03e5Sespie {
1783c87b03e5Sespie   int i;
1784c87b03e5Sespie 
1785c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1786c87b03e5Sespie     {
1787c87b03e5Sespie       struct eh_region *region = cfun->eh->region_array[i];
1788c87b03e5Sespie       struct eh_region *outer;
1789c87b03e5Sespie       rtx seq;
1790c87b03e5Sespie 
1791c87b03e5Sespie       /* Mind we don't process a region more than once.  */
1792c87b03e5Sespie       if (!region || region->region_number != i)
1793c87b03e5Sespie 	continue;
1794c87b03e5Sespie 
1795c87b03e5Sespie       /* If there is no RESX, or it has been deleted by flow, there's
1796c87b03e5Sespie 	 nothing to fix up.  */
1797c87b03e5Sespie       if (! region->resume || INSN_DELETED_P (region->resume))
1798c87b03e5Sespie 	continue;
1799c87b03e5Sespie 
1800c87b03e5Sespie       /* Search for another landing pad in this function.  */
1801c87b03e5Sespie       for (outer = region->outer; outer ; outer = outer->outer)
1802c87b03e5Sespie 	if (outer->post_landing_pad)
1803c87b03e5Sespie 	  break;
1804c87b03e5Sespie 
1805c87b03e5Sespie       start_sequence ();
1806c87b03e5Sespie 
1807c87b03e5Sespie       if (outer)
1808c87b03e5Sespie 	emit_jump (outer->post_landing_pad);
1809c87b03e5Sespie       else
1810c87b03e5Sespie 	emit_library_call (unwind_resume_libfunc, LCT_THROW,
1811c87b03e5Sespie 			   VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
1812c87b03e5Sespie 
1813c87b03e5Sespie       seq = get_insns ();
1814c87b03e5Sespie       end_sequence ();
1815c87b03e5Sespie       emit_insn_before (seq, region->resume);
1816c87b03e5Sespie       delete_insn (region->resume);
1817c87b03e5Sespie     }
1818c87b03e5Sespie }
1819c87b03e5Sespie 
1820c87b03e5Sespie 
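/* For the dwarf2 scheme, emit the landing pad proper for each region
   that needs one: a preserved label at which the unwinder lands, the
   target's receiver pattern if it has one, clobbers for any call-saved
   EH data registers, and moves of the exception pointer and filter
   value from the EH_RETURN_DATA registers into the pseudos used by the
   post-landing-pad code built above.  */
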
1821c87b03e5Sespie static void
1822c87b03e5Sespie dw2_build_landing_pads ()
1823c87b03e5Sespie {
1824c87b03e5Sespie   int i;
1825c87b03e5Sespie   unsigned int j;
1826c87b03e5Sespie 
1827c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1828c87b03e5Sespie     {
1829c87b03e5Sespie       struct eh_region *region = cfun->eh->region_array[i];
1830c87b03e5Sespie       rtx seq;
1831c87b03e5Sespie       bool clobbers_hard_regs = false;
1832c87b03e5Sespie 
1833c87b03e5Sespie       /* Mind we don't process a region more than once.  */
1834c87b03e5Sespie       if (!region || region->region_number != i)
1835c87b03e5Sespie 	continue;
1836c87b03e5Sespie 
1837c87b03e5Sespie       if (region->type != ERT_CLEANUP
1838c87b03e5Sespie 	  && region->type != ERT_TRY
1839c87b03e5Sespie 	  && region->type != ERT_ALLOWED_EXCEPTIONS)
1840c87b03e5Sespie 	continue;
1841c87b03e5Sespie 
1842c87b03e5Sespie       start_sequence ();
1843c87b03e5Sespie 
1844c87b03e5Sespie       region->landing_pad = gen_label_rtx ();
1845c87b03e5Sespie       emit_label (region->landing_pad);
1846c87b03e5Sespie 
1847c87b03e5Sespie #ifdef HAVE_exception_receiver
1848c87b03e5Sespie       if (HAVE_exception_receiver)
1849c87b03e5Sespie 	emit_insn (gen_exception_receiver ());
1850c87b03e5Sespie       else
1851c87b03e5Sespie #endif
1852c87b03e5Sespie #ifdef HAVE_nonlocal_goto_receiver
1853c87b03e5Sespie 	if (HAVE_nonlocal_goto_receiver)
1854c87b03e5Sespie 	  emit_insn (gen_nonlocal_goto_receiver ());
1855c87b03e5Sespie 	else
1856c87b03e5Sespie #endif
1857c87b03e5Sespie 	  { /* Nothing */ }
1858c87b03e5Sespie 
1859c87b03e5Sespie       /* If the eh_return data registers are call-saved, then we
1860c87b03e5Sespie 	 won't have considered them clobbered from the call that
1861c87b03e5Sespie 	 threw.  Kill them now.  */
1862c87b03e5Sespie       for (j = 0; ; ++j)
1863c87b03e5Sespie 	{
1864c87b03e5Sespie 	  unsigned r = EH_RETURN_DATA_REGNO (j);
1865c87b03e5Sespie 	  if (r == INVALID_REGNUM)
1866c87b03e5Sespie 	    break;
1867c87b03e5Sespie 	  if (! call_used_regs[r])
1868c87b03e5Sespie 	    {
1869c87b03e5Sespie 	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1870c87b03e5Sespie 	      clobbers_hard_regs = true;
1871c87b03e5Sespie 	    }
1872c87b03e5Sespie 	}
1873c87b03e5Sespie 
1874c87b03e5Sespie       if (clobbers_hard_regs)
1875c87b03e5Sespie 	{
1876c87b03e5Sespie 	  /* @@@ This is a kludge.  Not all machine descriptions define a
1877c87b03e5Sespie 	     blockage insn, but we must not allow the code we just generated
1878c87b03e5Sespie 	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
1879c87b03e5Sespie 	     a blockage insn.  */
1880c87b03e5Sespie 	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1881c87b03e5Sespie 	}
1882c87b03e5Sespie 
1883c87b03e5Sespie       emit_move_insn (cfun->eh->exc_ptr,
1884c87b03e5Sespie 		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1885c87b03e5Sespie       emit_move_insn (cfun->eh->filter,
1886c87b03e5Sespie 		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1887c87b03e5Sespie 
1888c87b03e5Sespie       seq = get_insns ();
1889c87b03e5Sespie       end_sequence ();
1890c87b03e5Sespie 
1891c87b03e5Sespie       emit_insn_before (seq, region->post_landing_pad);
1892c87b03e5Sespie     }
1893c87b03e5Sespie }
1894c87b03e5Sespie 
1895c87b03e5Sespie 
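/* Per-region bookkeeping for the setjmp/longjmp scheme: whether a throw
   can land in the region directly, the region's index into the action
   record table, the small integer the dispatcher compares against, and
   the call-site value stored before the calls belonging to the region.  */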
1896c87b03e5Sespie struct sjlj_lp_info
1897c87b03e5Sespie {
1898c87b03e5Sespie   int directly_reachable;
1899c87b03e5Sespie   int action_index;
1900c87b03e5Sespie   int dispatch_index;
1901c87b03e5Sespie   int call_site_index;
1902c87b03e5Sespie };
1903c87b03e5Sespie 
1904c87b03e5Sespie static bool
1905c87b03e5Sespie sjlj_find_directly_reachable_regions (lp_info)
1906c87b03e5Sespie      struct sjlj_lp_info *lp_info;
1907c87b03e5Sespie {
1908c87b03e5Sespie   rtx insn;
1909c87b03e5Sespie   bool found_one = false;
1910c87b03e5Sespie 
1911c87b03e5Sespie   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1912c87b03e5Sespie     {
1913c87b03e5Sespie       struct eh_region *region;
1914c87b03e5Sespie       enum reachable_code rc;
1915c87b03e5Sespie       tree type_thrown;
1916c87b03e5Sespie       rtx note;
1917c87b03e5Sespie 
1918c87b03e5Sespie       if (! INSN_P (insn))
1919c87b03e5Sespie 	continue;
1920c87b03e5Sespie 
1921c87b03e5Sespie       note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1922c87b03e5Sespie       if (!note || INTVAL (XEXP (note, 0)) <= 0)
1923c87b03e5Sespie 	continue;
1924c87b03e5Sespie 
1925c87b03e5Sespie       region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1926c87b03e5Sespie 
1927c87b03e5Sespie       type_thrown = NULL_TREE;
1928c87b03e5Sespie       if (region->type == ERT_THROW)
1929c87b03e5Sespie 	{
1930c87b03e5Sespie 	  type_thrown = region->u.throw.type;
1931c87b03e5Sespie 	  region = region->outer;
1932c87b03e5Sespie 	}
1933c87b03e5Sespie 
1934c87b03e5Sespie       /* Find the first containing region that might handle the exception.
1935c87b03e5Sespie 	 That's the landing pad to which we will transfer control.  */
1936c87b03e5Sespie       rc = RNL_NOT_CAUGHT;
1937c87b03e5Sespie       for (; region; region = region->outer)
1938c87b03e5Sespie 	{
1939c87b03e5Sespie 	  rc = reachable_next_level (region, type_thrown, 0);
1940c87b03e5Sespie 	  if (rc != RNL_NOT_CAUGHT)
1941c87b03e5Sespie 	    break;
1942c87b03e5Sespie 	}
1943c87b03e5Sespie       if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1944c87b03e5Sespie 	{
1945c87b03e5Sespie 	  lp_info[region->region_number].directly_reachable = 1;
1946c87b03e5Sespie 	  found_one = true;
1947c87b03e5Sespie 	}
1948c87b03e5Sespie     }
1949c87b03e5Sespie 
1950c87b03e5Sespie   return found_one;
1951c87b03e5Sespie }
1952c87b03e5Sespie 
1953c87b03e5Sespie static void
1954c87b03e5Sespie sjlj_assign_call_site_values (dispatch_label, lp_info)
1955c87b03e5Sespie      rtx dispatch_label;
1956c87b03e5Sespie      struct sjlj_lp_info *lp_info;
1957c87b03e5Sespie {
1958c87b03e5Sespie   htab_t ar_hash;
1959c87b03e5Sespie   int i, index;
1960c87b03e5Sespie 
1961c87b03e5Sespie   /* First task: build the action table.  */
1962c87b03e5Sespie 
1963c87b03e5Sespie   VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1964c87b03e5Sespie   ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1965c87b03e5Sespie 
1966c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1967c87b03e5Sespie     if (lp_info[i].directly_reachable)
1968c87b03e5Sespie       {
1969c87b03e5Sespie 	struct eh_region *r = cfun->eh->region_array[i];
1970c87b03e5Sespie 	r->landing_pad = dispatch_label;
1971c87b03e5Sespie 	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1972c87b03e5Sespie 	if (lp_info[i].action_index != -1)
1973c87b03e5Sespie 	  cfun->uses_eh_lsda = 1;
1974c87b03e5Sespie       }
1975c87b03e5Sespie 
1976c87b03e5Sespie   htab_delete (ar_hash);
1977c87b03e5Sespie 
1978c87b03e5Sespie   /* Next: assign dispatch values.  In dwarf2 terms, this would be the
1979c87b03e5Sespie      landing pad label for the region.  For sjlj though, there is one
1980c87b03e5Sespie      common landing pad from which we dispatch to the post-landing pads.
1981c87b03e5Sespie 
1982c87b03e5Sespie      A region receives a dispatch index if it is directly reachable
1983c87b03e5Sespie      and requires in-function processing.  Regions that share post-landing
1984c87b03e5Sespie      pads may share dispatch indices.  */
1985c87b03e5Sespie   /* ??? Post-landing pad sharing doesn't actually happen at the moment
1986c87b03e5Sespie      (see build_post_landing_pads) so we don't bother checking for it.  */
1987c87b03e5Sespie 
1988c87b03e5Sespie   index = 0;
1989c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1990c87b03e5Sespie     if (lp_info[i].directly_reachable)
1991c87b03e5Sespie       lp_info[i].dispatch_index = index++;
1992c87b03e5Sespie 
1993c87b03e5Sespie   /* Finally: assign call-site values.  In dwarf2 terms, this would be
1994c87b03e5Sespie      the region number assigned by convert_to_eh_region_ranges, except
1995c87b03e5Sespie      that no-action and must-not-throw are handled differently.  */
1996c87b03e5Sespie 
1997c87b03e5Sespie   call_site_base = 1;
1998c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
1999c87b03e5Sespie     if (lp_info[i].directly_reachable)
2000c87b03e5Sespie       {
2001c87b03e5Sespie 	int action = lp_info[i].action_index;
2002c87b03e5Sespie 
2003c87b03e5Sespie 	/* Map must-not-throw to otherwise unused call-site index 0.  */
2004c87b03e5Sespie 	if (action == -2)
2005c87b03e5Sespie 	  index = 0;
2006c87b03e5Sespie 	/* Map no-action to otherwise unused call-site index -1.  */
2007c87b03e5Sespie 	else if (action == -1)
2008c87b03e5Sespie 	  index = -1;
2009c87b03e5Sespie 	/* Otherwise, look it up in the table.  */
2010c87b03e5Sespie 	else
2011c87b03e5Sespie 	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2012c87b03e5Sespie 
2013c87b03e5Sespie 	lp_info[i].call_site_index = index;
2014c87b03e5Sespie       }
2015c87b03e5Sespie }
2016c87b03e5Sespie 
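/* To recap the mapping just established (the -1/-2 action values come
   from collect_one_action_chain):

	action_index    call_site_index      meaning
	    -2                 0             must-not-throw
	    -1                -1             no action: unwind past us
	  >= 0          add_call_site ()     dispatch via the action table  */

/* Before each insn that can throw (for a call, before its argument
   loads), store that insn's call-site value into the function context,
   so that a throw out of the insn tells the dispatcher which region
   should receive control.  Redundant stores within an extended basic
   block are skipped.  */
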
2017c87b03e5Sespie static void
2018c87b03e5Sespie sjlj_mark_call_sites (lp_info)
2019c87b03e5Sespie      struct sjlj_lp_info *lp_info;
2020c87b03e5Sespie {
2021c87b03e5Sespie   int last_call_site = -2;
2022c87b03e5Sespie   rtx insn, mem;
2023c87b03e5Sespie 
2024c87b03e5Sespie   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2025c87b03e5Sespie     {
2026c87b03e5Sespie       struct eh_region *region;
2027c87b03e5Sespie       int this_call_site;
2028c87b03e5Sespie       rtx note, before, p;
2029c87b03e5Sespie 
2030c87b03e5Sespie       /* Reset value tracking at extended basic block boundaries.  */
2031c87b03e5Sespie       if (GET_CODE (insn) == CODE_LABEL)
2032c87b03e5Sespie 	last_call_site = -2;
2033c87b03e5Sespie 
2034c87b03e5Sespie       if (! INSN_P (insn))
2035c87b03e5Sespie 	continue;
2036c87b03e5Sespie 
2037c87b03e5Sespie       note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2038c87b03e5Sespie       if (!note)
2039c87b03e5Sespie 	{
2040c87b03e5Sespie 	  /* Calls (and trapping insns) without notes are outside any
2041c87b03e5Sespie 	     exception handling region in this function.  Mark them as
2042c87b03e5Sespie 	     no action.  */
2043c87b03e5Sespie 	  if (GET_CODE (insn) == CALL_INSN
2044c87b03e5Sespie 	      || (flag_non_call_exceptions
2045c87b03e5Sespie 		  && may_trap_p (PATTERN (insn))))
2046c87b03e5Sespie 	    this_call_site = -1;
2047c87b03e5Sespie 	  else
2048c87b03e5Sespie 	    continue;
2049c87b03e5Sespie 	}
2050c87b03e5Sespie       else
2051c87b03e5Sespie 	{
2052c87b03e5Sespie 	  /* Calls that are known to not throw need not be marked.  */
2053c87b03e5Sespie 	  if (INTVAL (XEXP (note, 0)) <= 0)
2054c87b03e5Sespie 	    continue;
2055c87b03e5Sespie 
2056c87b03e5Sespie 	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2057c87b03e5Sespie 	  this_call_site = lp_info[region->region_number].call_site_index;
2058c87b03e5Sespie 	}
2059c87b03e5Sespie 
2060c87b03e5Sespie       if (this_call_site == last_call_site)
2061c87b03e5Sespie 	continue;
2062c87b03e5Sespie 
2063c87b03e5Sespie       /* Don't separate a call from its argument loads.  */
2064c87b03e5Sespie       before = insn;
2065c87b03e5Sespie       if (GET_CODE (insn) == CALL_INSN)
2066c87b03e5Sespie 	before = find_first_parameter_load (insn, NULL_RTX);
2067c87b03e5Sespie 
2068c87b03e5Sespie       start_sequence ();
2069c87b03e5Sespie       mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2070c87b03e5Sespie 			    sjlj_fc_call_site_ofs);
2071c87b03e5Sespie       emit_move_insn (mem, GEN_INT (this_call_site));
2072c87b03e5Sespie       p = get_insns ();
2073c87b03e5Sespie       end_sequence ();
2074c87b03e5Sespie 
2075c87b03e5Sespie       emit_insn_before (p, before);
2076c87b03e5Sespie       last_call_site = this_call_site;
2077c87b03e5Sespie     }
2078c87b03e5Sespie }
2079c87b03e5Sespie 
2080c87b03e5Sespie /* Construct the SjLj_Function_Context.  */
2081c87b03e5Sespie 
2082c87b03e5Sespie static void
2083c87b03e5Sespie sjlj_emit_function_enter (dispatch_label)
2084c87b03e5Sespie      rtx dispatch_label;
2085c87b03e5Sespie {
2086c87b03e5Sespie   rtx fn_begin, fc, mem, seq;
2087c87b03e5Sespie 
2088c87b03e5Sespie   fc = cfun->eh->sjlj_fc;
2089c87b03e5Sespie 
2090c87b03e5Sespie   start_sequence ();
2091c87b03e5Sespie 
2092c87b03e5Sespie   /* We're storing this libcall's address into memory instead of
2093c87b03e5Sespie      calling it directly.  Thus, we must call assemble_external_libcall
2094c87b03e5Sespie      here, as we cannot depend on emit_library_call to do it for us.  */
2095c87b03e5Sespie   assemble_external_libcall (eh_personality_libfunc);
2096c87b03e5Sespie   mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2097c87b03e5Sespie   emit_move_insn (mem, eh_personality_libfunc);
2098c87b03e5Sespie 
2099c87b03e5Sespie   mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2100c87b03e5Sespie   if (cfun->uses_eh_lsda)
2101c87b03e5Sespie     {
2102c87b03e5Sespie       char buf[20];
2103c87b03e5Sespie       ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2104c87b03e5Sespie       emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2105c87b03e5Sespie     }
2106c87b03e5Sespie   else
2107c87b03e5Sespie     emit_move_insn (mem, const0_rtx);
2108c87b03e5Sespie 
2109c87b03e5Sespie #ifdef DONT_USE_BUILTIN_SETJMP
2110c87b03e5Sespie   {
2111c87b03e5Sespie     rtx x, note;
2112c87b03e5Sespie     x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2113c87b03e5Sespie 				 TYPE_MODE (integer_type_node), 1,
2114c87b03e5Sespie 				 plus_constant (XEXP (fc, 0),
2115c87b03e5Sespie 						sjlj_fc_jbuf_ofs), Pmode);
2116c87b03e5Sespie 
2117c87b03e5Sespie     note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2118c87b03e5Sespie     NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2119c87b03e5Sespie 
2120c87b03e5Sespie     emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2121c87b03e5Sespie 			     TYPE_MODE (integer_type_node), 0, dispatch_label);
2122c87b03e5Sespie   }
2123c87b03e5Sespie #else
2124c87b03e5Sespie   expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2125c87b03e5Sespie 			       dispatch_label);
2126c87b03e5Sespie #endif
2127c87b03e5Sespie 
2128c87b03e5Sespie   emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2129c87b03e5Sespie 		     1, XEXP (fc, 0), Pmode);
2130c87b03e5Sespie 
2131c87b03e5Sespie   seq = get_insns ();
2132c87b03e5Sespie   end_sequence ();
2133c87b03e5Sespie 
2134c87b03e5Sespie   /* ??? Instead of doing this at the beginning of the function,
2135c87b03e5Sespie      do this in a block that is at loop level 0 and dominates all
2136c87b03e5Sespie      can_throw_internal instructions.  */
2137c87b03e5Sespie 
2138c87b03e5Sespie   for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2139c87b03e5Sespie     if (GET_CODE (fn_begin) == NOTE
2140c87b03e5Sespie 	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2141c87b03e5Sespie       break;
2142c87b03e5Sespie   emit_insn_after (seq, fn_begin);
2143c87b03e5Sespie }
2144c87b03e5Sespie 
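/* Taken together, the prologue code emitted above corresponds roughly
   to the following pseudo-C, where fc is the sjlj function context and
   the runtime entry points are whatever eh_personality_libfunc and
   unwind_sjlj_register_libfunc were set to (the names below are only
   for orientation, e.g. the C++ front end uses __gxx_personality_sj0):

	fc.personality = __gxx_personality_sj0;
	fc.lsda = &LLSDAxxx;		(or 0 if the LSDA is unused)
	if (setjmp (fc.jbuf) != 0)
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);  */
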
2145c87b03e5Sespie /* Call back from expand_function_end to know where we should put
2146c87b03e5Sespie    the call to unwind_sjlj_unregister_libfunc if needed.  */
2147c87b03e5Sespie 
2148c87b03e5Sespie void
2149c87b03e5Sespie sjlj_emit_function_exit_after (after)
2150c87b03e5Sespie      rtx after;
2151c87b03e5Sespie {
2152c87b03e5Sespie   cfun->eh->sjlj_exit_after = after;
2153c87b03e5Sespie }
2154c87b03e5Sespie 
2155c87b03e5Sespie static void
2156c87b03e5Sespie sjlj_emit_function_exit ()
2157c87b03e5Sespie {
2158c87b03e5Sespie   rtx seq;
2159c87b03e5Sespie 
2160c87b03e5Sespie   start_sequence ();
2161c87b03e5Sespie 
2162c87b03e5Sespie   emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2163c87b03e5Sespie 		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2164c87b03e5Sespie 
2165c87b03e5Sespie   seq = get_insns ();
2166c87b03e5Sespie   end_sequence ();
2167c87b03e5Sespie 
2168c87b03e5Sespie   /* ??? Really this can be done in any block at loop level 0 that
2169c87b03e5Sespie      post-dominates all can_throw_internal instructions.  This is
2170c87b03e5Sespie      the last possible moment.  */
2171c87b03e5Sespie 
2172c87b03e5Sespie   emit_insn_after (seq, cfun->eh->sjlj_exit_after);
2173c87b03e5Sespie }
2174c87b03e5Sespie 
2175c87b03e5Sespie static void
2176c87b03e5Sespie sjlj_emit_dispatch_table (dispatch_label, lp_info)
2177c87b03e5Sespie      rtx dispatch_label;
2178c87b03e5Sespie      struct sjlj_lp_info *lp_info;
2179c87b03e5Sespie {
2180c87b03e5Sespie   int i, first_reachable;
2181c87b03e5Sespie   rtx mem, dispatch, seq, fc;
2182c87b03e5Sespie 
2183c87b03e5Sespie   fc = cfun->eh->sjlj_fc;
2184c87b03e5Sespie 
2185c87b03e5Sespie   start_sequence ();
2186c87b03e5Sespie 
2187c87b03e5Sespie   emit_label (dispatch_label);
2188c87b03e5Sespie 
2189c87b03e5Sespie #ifndef DONT_USE_BUILTIN_SETJMP
2190c87b03e5Sespie   expand_builtin_setjmp_receiver (dispatch_label);
2191c87b03e5Sespie #endif
2192c87b03e5Sespie 
2193c87b03e5Sespie   /* Load up dispatch index, exc_ptr and filter values from the
2194c87b03e5Sespie      function context.  */
2195c87b03e5Sespie   mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2196c87b03e5Sespie 			sjlj_fc_call_site_ofs);
2197c87b03e5Sespie   dispatch = copy_to_reg (mem);
2198c87b03e5Sespie 
2199c87b03e5Sespie   mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2200c87b03e5Sespie   if (word_mode != Pmode)
2201c87b03e5Sespie     {
2202c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
2203c87b03e5Sespie       mem = convert_memory_address (Pmode, mem);
2204c87b03e5Sespie #else
2205c87b03e5Sespie       mem = convert_to_mode (Pmode, mem, 0);
2206c87b03e5Sespie #endif
2207c87b03e5Sespie     }
2208c87b03e5Sespie   emit_move_insn (cfun->eh->exc_ptr, mem);
2209c87b03e5Sespie 
2210c87b03e5Sespie   mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2211c87b03e5Sespie   emit_move_insn (cfun->eh->filter, mem);
2212c87b03e5Sespie 
2213c87b03e5Sespie   /* Jump to one of the directly reachable regions.  */
2214c87b03e5Sespie   /* ??? This really ought to be using a switch statement.  */
2215c87b03e5Sespie 
2216c87b03e5Sespie   first_reachable = 0;
2217c87b03e5Sespie   for (i = cfun->eh->last_region_number; i > 0; --i)
2218c87b03e5Sespie     {
2219c87b03e5Sespie       if (! lp_info[i].directly_reachable)
2220c87b03e5Sespie 	continue;
2221c87b03e5Sespie 
2222c87b03e5Sespie       if (! first_reachable)
2223c87b03e5Sespie 	{
2224c87b03e5Sespie 	  first_reachable = i;
2225c87b03e5Sespie 	  continue;
2226c87b03e5Sespie 	}
2227c87b03e5Sespie 
2228c87b03e5Sespie       emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2229c87b03e5Sespie 			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2230c87b03e5Sespie 			       cfun->eh->region_array[i]->post_landing_pad);
2231c87b03e5Sespie     }
2232c87b03e5Sespie 
2233c87b03e5Sespie   seq = get_insns ();
2234c87b03e5Sespie   end_sequence ();
2235c87b03e5Sespie 
2236c87b03e5Sespie   emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
2237c87b03e5Sespie 			  ->post_landing_pad));
2238c87b03e5Sespie }
2239c87b03e5Sespie 
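/* The dispatcher just emitted is the place the unwinder longjmps back
   into this frame; in pseudo-C it reads (placeholder labels, mirroring
   the loads and compares above):

	dispatch_label:
	  dispatch = fc.call_site;
	  exc_ptr  = fc.data[0];
	  filter   = fc.data[1];
	  if (dispatch == DISPATCH_INDEX_i) goto post_landing_pad_i;
	  ...
	  goto post_landing_pad_of_the_first_reachable_region;  */

/* Top level of the setjmp/longjmp scheme: find the directly reachable
   regions, lay out the function context on the stack, and emit the
   call-site stores, the register/unregister calls and the dispatcher.  */
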
2240c87b03e5Sespie static void
2241c87b03e5Sespie sjlj_build_landing_pads ()
2242c87b03e5Sespie {
2243c87b03e5Sespie   struct sjlj_lp_info *lp_info;
2244c87b03e5Sespie 
2245c87b03e5Sespie   lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2246c87b03e5Sespie 					     sizeof (struct sjlj_lp_info));
2247c87b03e5Sespie 
2248c87b03e5Sespie   if (sjlj_find_directly_reachable_regions (lp_info))
2249c87b03e5Sespie     {
2250c87b03e5Sespie       rtx dispatch_label = gen_label_rtx ();
2251c87b03e5Sespie 
2252c87b03e5Sespie       cfun->eh->sjlj_fc
2253c87b03e5Sespie 	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2254c87b03e5Sespie 			      int_size_in_bytes (sjlj_fc_type_node),
2255c87b03e5Sespie 			      TYPE_ALIGN (sjlj_fc_type_node));
2256c87b03e5Sespie 
2257c87b03e5Sespie       sjlj_assign_call_site_values (dispatch_label, lp_info);
2258c87b03e5Sespie       sjlj_mark_call_sites (lp_info);
2259c87b03e5Sespie 
2260c87b03e5Sespie       sjlj_emit_function_enter (dispatch_label);
2261c87b03e5Sespie       sjlj_emit_dispatch_table (dispatch_label, lp_info);
2262c87b03e5Sespie       sjlj_emit_function_exit ();
2263c87b03e5Sespie     }
2264c87b03e5Sespie 
2265c87b03e5Sespie   free (lp_info);
2266c87b03e5Sespie }
2267c87b03e5Sespie 
2268c87b03e5Sespie void
2269c87b03e5Sespie finish_eh_generation ()
2270c87b03e5Sespie {
2271c87b03e5Sespie   /* Nothing to do if no regions created.  */
2272c87b03e5Sespie   if (cfun->eh->region_tree == NULL)
2273c87b03e5Sespie     return;
2274c87b03e5Sespie 
2275c87b03e5Sespie   /* The object here is to provide find_basic_blocks with detailed
2276c87b03e5Sespie      information (via reachable_handlers) on how exception control
2277c87b03e5Sespie      flows within the function.  In this first pass, we can include
2278c87b03e5Sespie      type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2279c87b03e5Sespie      regions, and hope that it will be useful in deleting unreachable
2280c87b03e5Sespie      handlers.  Subsequently, we will generate landing pads which will
2281c87b03e5Sespie      connect many of the handlers, and then type information will not
2282c87b03e5Sespie      be effective.  Still, this is a win over previous implementations.  */
2283c87b03e5Sespie 
2284c87b03e5Sespie   cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2285c87b03e5Sespie 
2286c87b03e5Sespie   /* These registers are used by the landing pads.  Make sure they
2287c87b03e5Sespie      have been generated.  */
2288c87b03e5Sespie   get_exception_pointer (cfun);
2289c87b03e5Sespie   get_exception_filter (cfun);
2290c87b03e5Sespie 
2291c87b03e5Sespie   /* Construct the landing pads.  */
2292c87b03e5Sespie 
2293c87b03e5Sespie   assign_filter_values ();
2294c87b03e5Sespie   build_post_landing_pads ();
2295c87b03e5Sespie   connect_post_landing_pads ();
2296c87b03e5Sespie   if (USING_SJLJ_EXCEPTIONS)
2297c87b03e5Sespie     sjlj_build_landing_pads ();
2298c87b03e5Sespie   else
2299c87b03e5Sespie     dw2_build_landing_pads ();
2300c87b03e5Sespie 
2301c87b03e5Sespie   cfun->eh->built_landing_pads = 1;
2302c87b03e5Sespie 
2303c87b03e5Sespie   /* We've totally changed the CFG.  Start over.  */
2304c87b03e5Sespie   find_exception_handler_labels ();
2305c87b03e5Sespie   rebuild_jump_labels (get_insns ());
2306c87b03e5Sespie   find_basic_blocks (get_insns (), max_reg_num (), 0);
2307c87b03e5Sespie   cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2308c87b03e5Sespie }
2309c87b03e5Sespie 
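/* Hash an ehl_map_entry by its label's CODE_LABEL_NUMBER.  The small,
   consecutive label numbers are spread over the hash space by
   multiplying with 2^32 / phi, i.e. Fibonacci hashing (Knuth, TAOCP
   vol. 3).  */
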
2310c87b03e5Sespie static hashval_t
2311c87b03e5Sespie ehl_hash (pentry)
2312c87b03e5Sespie      const PTR pentry;
2313c87b03e5Sespie {
2314c87b03e5Sespie   struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2315c87b03e5Sespie 
2316c87b03e5Sespie   /* 2^32 * ((sqrt(5) - 1) / 2) */
2317c87b03e5Sespie   const hashval_t scaled_golden_ratio = 0x9e3779b9;
2318c87b03e5Sespie   return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2319c87b03e5Sespie }
2320c87b03e5Sespie 
2321c87b03e5Sespie static int
2322c87b03e5Sespie ehl_eq (pentry, pdata)
2323c87b03e5Sespie      const PTR pentry;
2324c87b03e5Sespie      const PTR pdata;
2325c87b03e5Sespie {
2326c87b03e5Sespie   struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2327c87b03e5Sespie   struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2328c87b03e5Sespie 
2329c87b03e5Sespie   return entry->label == data->label;
2330c87b03e5Sespie }
2331c87b03e5Sespie 
2332c87b03e5Sespie /* This section handles removing dead code for flow.  */
2333c87b03e5Sespie 
2334c87b03e5Sespie /* Remove LABEL from exception_handler_label_map.  */
2335c87b03e5Sespie 
2336c87b03e5Sespie static void
2337c87b03e5Sespie remove_exception_handler_label (label)
2338c87b03e5Sespie      rtx label;
2339c87b03e5Sespie {
2340c87b03e5Sespie   struct ehl_map_entry **slot, tmp;
2341c87b03e5Sespie 
2342c87b03e5Sespie   /* If exception_handler_label_map was not built yet,
2343c87b03e5Sespie      there is nothing to do.  */
2344c87b03e5Sespie   if (cfun->eh->exception_handler_label_map == NULL)
2345c87b03e5Sespie     return;
2346c87b03e5Sespie 
2347c87b03e5Sespie   tmp.label = label;
2348c87b03e5Sespie   slot = (struct ehl_map_entry **)
2349c87b03e5Sespie     htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2350c87b03e5Sespie   if (! slot)
2351c87b03e5Sespie     abort ();
2352c87b03e5Sespie 
2353c87b03e5Sespie   htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2354c87b03e5Sespie }
2355c87b03e5Sespie 
2356c87b03e5Sespie /* Splice REGION from the region tree etc.  */
2357c87b03e5Sespie 
2358c87b03e5Sespie static void
2359c87b03e5Sespie remove_eh_handler (region)
2360c87b03e5Sespie      struct eh_region *region;
2361c87b03e5Sespie {
2362c87b03e5Sespie   struct eh_region **pp, **pp_start, *p, *outer, *inner;
2363c87b03e5Sespie   rtx lab;
2364c87b03e5Sespie 
2365c87b03e5Sespie   /* For the benefit of efficiently handling REG_EH_REGION notes,
2366c87b03e5Sespie      replace this region in the region array with its containing
2367c87b03e5Sespie      region.  Note that previous region deletions may result in
2368c87b03e5Sespie      multiple copies of this region in the array, so we have a
2369c87b03e5Sespie      list of alternate numbers by which we are known.  */
2370c87b03e5Sespie 
2371c87b03e5Sespie   outer = region->outer;
2372c87b03e5Sespie   cfun->eh->region_array[region->region_number] = outer;
2373c87b03e5Sespie   if (region->aka)
2374c87b03e5Sespie     {
2375c87b03e5Sespie       int i;
2376c87b03e5Sespie       EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2377c87b03e5Sespie 	{ cfun->eh->region_array[i] = outer; });
2378c87b03e5Sespie     }
2379c87b03e5Sespie 
2380c87b03e5Sespie   if (outer)
2381c87b03e5Sespie     {
2382c87b03e5Sespie       if (!outer->aka)
2383c87b03e5Sespie         outer->aka = BITMAP_GGC_ALLOC ();
2384c87b03e5Sespie       if (region->aka)
2385c87b03e5Sespie 	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2386c87b03e5Sespie       bitmap_set_bit (outer->aka, region->region_number);
2387c87b03e5Sespie     }
2388c87b03e5Sespie 
2389c87b03e5Sespie   if (cfun->eh->built_landing_pads)
2390c87b03e5Sespie     lab = region->landing_pad;
2391c87b03e5Sespie   else
2392c87b03e5Sespie     lab = region->label;
2393c87b03e5Sespie   if (lab)
2394c87b03e5Sespie     remove_exception_handler_label (lab);
2395c87b03e5Sespie 
2396c87b03e5Sespie   if (outer)
2397c87b03e5Sespie     pp_start = &outer->inner;
2398c87b03e5Sespie   else
2399c87b03e5Sespie     pp_start = &cfun->eh->region_tree;
2400c87b03e5Sespie   for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2401c87b03e5Sespie     continue;
2402c87b03e5Sespie   *pp = region->next_peer;
2403c87b03e5Sespie 
2404c87b03e5Sespie   inner = region->inner;
2405c87b03e5Sespie   if (inner)
2406c87b03e5Sespie     {
2407c87b03e5Sespie       for (p = inner; p->next_peer ; p = p->next_peer)
2408c87b03e5Sespie 	p->outer = outer;
2409c87b03e5Sespie       p->outer = outer;
2410c87b03e5Sespie 
2411c87b03e5Sespie       p->next_peer = *pp_start;
2412c87b03e5Sespie       *pp_start = inner;
2413c87b03e5Sespie     }
2414c87b03e5Sespie 
2415c87b03e5Sespie   if (region->type == ERT_CATCH)
2416c87b03e5Sespie     {
2417c87b03e5Sespie       struct eh_region *try, *next, *prev;
2418c87b03e5Sespie 
2419c87b03e5Sespie       for (try = region->next_peer;
2420c87b03e5Sespie 	   try->type == ERT_CATCH;
2421c87b03e5Sespie 	   try = try->next_peer)
2422c87b03e5Sespie 	continue;
2423c87b03e5Sespie       if (try->type != ERT_TRY)
2424c87b03e5Sespie 	abort ();
2425c87b03e5Sespie 
2426c87b03e5Sespie       next = region->u.catch.next_catch;
2427c87b03e5Sespie       prev = region->u.catch.prev_catch;
2428c87b03e5Sespie 
2429c87b03e5Sespie       if (next)
2430c87b03e5Sespie 	next->u.catch.prev_catch = prev;
2431c87b03e5Sespie       else
2432c87b03e5Sespie 	try->u.try.last_catch = prev;
2433c87b03e5Sespie       if (prev)
2434c87b03e5Sespie 	prev->u.catch.next_catch = next;
2435c87b03e5Sespie       else
2436c87b03e5Sespie 	{
2437c87b03e5Sespie 	  try->u.try.catch = next;
2438c87b03e5Sespie 	  if (! next)
2439c87b03e5Sespie 	    remove_eh_handler (try);
2440c87b03e5Sespie 	}
2441c87b03e5Sespie     }
2442c87b03e5Sespie }
2443c87b03e5Sespie 
2444c87b03e5Sespie /* LABEL heads a basic block that is about to be deleted.  If this
2445c87b03e5Sespie    label corresponds to an exception region, we may be able to
2446c87b03e5Sespie    delete the region.  */
2447c87b03e5Sespie 
2448c87b03e5Sespie void
2449c87b03e5Sespie maybe_remove_eh_handler (label)
2450c87b03e5Sespie      rtx label;
2451c87b03e5Sespie {
2452c87b03e5Sespie   struct ehl_map_entry **slot, tmp;
2453c87b03e5Sespie   struct eh_region *region;
2454c87b03e5Sespie 
2455c87b03e5Sespie   /* ??? After generating landing pads, it's not so simple to determine
2456c87b03e5Sespie      if the region data is completely unused.  One must examine the
2457c87b03e5Sespie      landing pad and the post landing pad, and whether an inner try block
2458c87b03e5Sespie      is referencing the catch handlers directly.  */
2459c87b03e5Sespie   if (cfun->eh->built_landing_pads)
2460c87b03e5Sespie     return;
2461c87b03e5Sespie 
2462c87b03e5Sespie   tmp.label = label;
2463c87b03e5Sespie   slot = (struct ehl_map_entry **)
2464c87b03e5Sespie     htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2465c87b03e5Sespie   if (! slot)
2466c87b03e5Sespie     return;
2467c87b03e5Sespie   region = (*slot)->region;
2468c87b03e5Sespie   if (! region)
2469c87b03e5Sespie     return;
2470c87b03e5Sespie 
2471c87b03e5Sespie   /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2472c87b03e5Sespie      because there is no path to the fallback call to terminate.
2473c87b03e5Sespie      But the region continues to affect call-site data until there
2474c87b03e5Sespie      are no more contained calls, which we don't see here.  */
2475c87b03e5Sespie   if (region->type == ERT_MUST_NOT_THROW)
2476c87b03e5Sespie     {
2477c87b03e5Sespie       htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2478c87b03e5Sespie       region->label = NULL_RTX;
2479c87b03e5Sespie     }
2480c87b03e5Sespie   else
2481c87b03e5Sespie     remove_eh_handler (region);
2482c87b03e5Sespie }
2483c87b03e5Sespie 
2484c87b03e5Sespie /* Invokes CALLBACK for every exception handler label.  Only used by old
2485c87b03e5Sespie    loop hackery; should not be used by new code.  */
2486c87b03e5Sespie 
2487c87b03e5Sespie void
2488c87b03e5Sespie for_each_eh_label (callback)
2489c87b03e5Sespie      void (*callback) PARAMS ((rtx));
2490c87b03e5Sespie {
2491c87b03e5Sespie   htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2492c87b03e5Sespie 		 (void *)callback);
2493c87b03e5Sespie }
2494c87b03e5Sespie 
2495c87b03e5Sespie static int
2496c87b03e5Sespie for_each_eh_label_1 (pentry, data)
2497c87b03e5Sespie      PTR *pentry;
2498c87b03e5Sespie      PTR data;
2499c87b03e5Sespie {
2500c87b03e5Sespie   struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2501c87b03e5Sespie   void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
2502c87b03e5Sespie 
2503c87b03e5Sespie   (*callback) (entry->label);
2504c87b03e5Sespie   return 1;
2505c87b03e5Sespie }
2506c87b03e5Sespie 
2507c87b03e5Sespie /* This section describes CFG exception edges for flow.  */
2508c87b03e5Sespie 
2509c87b03e5Sespie /* For communicating between calls to reachable_next_level.  */
2510c87b03e5Sespie struct reachable_info GTY(())
2511c87b03e5Sespie {
2512c87b03e5Sespie   tree types_caught;
2513c87b03e5Sespie   tree types_allowed;
2514c87b03e5Sespie   rtx handlers;
2515c87b03e5Sespie };
2516c87b03e5Sespie 
2517c87b03e5Sespie /* A subroutine of reachable_next_level.  Return true if TYPE, or a
2518c87b03e5Sespie    base class of TYPE, is in HANDLED.  */
2519c87b03e5Sespie 
2520c87b03e5Sespie static int
2521c87b03e5Sespie check_handled (handled, type)
2522c87b03e5Sespie      tree handled, type;
2523c87b03e5Sespie {
2524c87b03e5Sespie   tree t;
2525c87b03e5Sespie 
2526c87b03e5Sespie   /* We can check for exact matches without front-end help.  */
2527c87b03e5Sespie   if (! lang_eh_type_covers)
2528c87b03e5Sespie     {
2529c87b03e5Sespie       for (t = handled; t ; t = TREE_CHAIN (t))
2530c87b03e5Sespie 	if (TREE_VALUE (t) == type)
2531c87b03e5Sespie 	  return 1;
2532c87b03e5Sespie     }
2533c87b03e5Sespie   else
2534c87b03e5Sespie     {
2535c87b03e5Sespie       for (t = handled; t ; t = TREE_CHAIN (t))
2536c87b03e5Sespie 	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2537c87b03e5Sespie 	  return 1;
2538c87b03e5Sespie     }
2539c87b03e5Sespie 
2540c87b03e5Sespie   return 0;
2541c87b03e5Sespie }
2542c87b03e5Sespie 
2543c87b03e5Sespie /* A subroutine of reachable_next_level.  If we are collecting a list
2544c87b03e5Sespie    of handlers, add one.  After landing pad generation, reference
2545c87b03e5Sespie    it instead of the handlers themselves.  Further, the handlers are
2546c87b03e5Sespie    all wired together, so by referencing one, we've got them all.
2547c87b03e5Sespie    Before landing pad generation we reference each handler individually.
2548c87b03e5Sespie 
2549c87b03e5Sespie    LP_REGION contains the landing pad; REGION is the handler.  */
2550c87b03e5Sespie 
2551c87b03e5Sespie static void
2552c87b03e5Sespie add_reachable_handler (info, lp_region, region)
2553c87b03e5Sespie      struct reachable_info *info;
2554c87b03e5Sespie      struct eh_region *lp_region;
2555c87b03e5Sespie      struct eh_region *region;
2556c87b03e5Sespie {
2557c87b03e5Sespie   if (! info)
2558c87b03e5Sespie     return;
2559c87b03e5Sespie 
2560c87b03e5Sespie   if (cfun->eh->built_landing_pads)
2561c87b03e5Sespie     {
2562c87b03e5Sespie       if (! info->handlers)
2563c87b03e5Sespie 	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2564c87b03e5Sespie     }
2565c87b03e5Sespie   else
2566c87b03e5Sespie     info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2567c87b03e5Sespie }
2568c87b03e5Sespie 
2569c87b03e5Sespie /* Process one level of exception regions for reachability.
2570c87b03e5Sespie    If TYPE_THROWN is non-null, then it is the *exact* type being
2571c87b03e5Sespie    propagated.  If INFO is non-null, then collect handler labels
2572c87b03e5Sespie    and caught/allowed type information between invocations.  */
2573c87b03e5Sespie 
2574c87b03e5Sespie static enum reachable_code
2575c87b03e5Sespie reachable_next_level (region, type_thrown, info)
2576c87b03e5Sespie      struct eh_region *region;
2577c87b03e5Sespie      tree type_thrown;
2578c87b03e5Sespie      struct reachable_info *info;
2579c87b03e5Sespie {
2580c87b03e5Sespie   switch (region->type)
2581c87b03e5Sespie     {
2582c87b03e5Sespie     case ERT_CLEANUP:
2583c87b03e5Sespie       /* Before landing-pad generation, we model control flow
2584c87b03e5Sespie 	 directly to the individual handlers.  In this way we can
2585c87b03e5Sespie 	 see that catch handler types may shadow one another.  */
2586c87b03e5Sespie       add_reachable_handler (info, region, region);
2587c87b03e5Sespie       return RNL_MAYBE_CAUGHT;
2588c87b03e5Sespie 
2589c87b03e5Sespie     case ERT_TRY:
2590c87b03e5Sespie       {
2591c87b03e5Sespie 	struct eh_region *c;
2592c87b03e5Sespie 	enum reachable_code ret = RNL_NOT_CAUGHT;
2593c87b03e5Sespie 
2594c87b03e5Sespie 	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2595c87b03e5Sespie 	  {
2596c87b03e5Sespie 	    /* A catch-all handler ends the search.  */
2597c87b03e5Sespie 	    if (c->u.catch.type_list == NULL)
2598c87b03e5Sespie 	      {
2599c87b03e5Sespie 		add_reachable_handler (info, region, c);
2600c87b03e5Sespie 		return RNL_CAUGHT;
2601c87b03e5Sespie 	      }
2602c87b03e5Sespie 
2603c87b03e5Sespie 	    if (type_thrown)
2604c87b03e5Sespie 	      {
2605c87b03e5Sespie 		/* If we have at least one type match, end the search.  */
2606c87b03e5Sespie 		tree tp_node = c->u.catch.type_list;
2607c87b03e5Sespie 
2608c87b03e5Sespie 		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2609c87b03e5Sespie 		  {
2610c87b03e5Sespie 		    tree type = TREE_VALUE (tp_node);
2611c87b03e5Sespie 
2612c87b03e5Sespie 		    if (type == type_thrown
2613c87b03e5Sespie 			|| (lang_eh_type_covers
2614c87b03e5Sespie 			    && (*lang_eh_type_covers) (type, type_thrown)))
2615c87b03e5Sespie 		      {
2616c87b03e5Sespie 			add_reachable_handler (info, region, c);
2617c87b03e5Sespie 			return RNL_CAUGHT;
2618c87b03e5Sespie 		      }
2619c87b03e5Sespie 		  }
2620c87b03e5Sespie 
2621c87b03e5Sespie 		/* If we have definitive information of a match failure,
2622c87b03e5Sespie 		   the catch won't trigger.  */
2623c87b03e5Sespie 		if (lang_eh_type_covers)
2624c87b03e5Sespie 		  return RNL_NOT_CAUGHT;
2625c87b03e5Sespie 	      }
2626c87b03e5Sespie 
2627c87b03e5Sespie 	    /* At this point, we either don't know what type is thrown or
2628c87b03e5Sespie 	       don't have front-end assistance to help deciding if it is
2629c87b03e5Sespie 	       covered by one of the types in the list for this region.
2630c87b03e5Sespie 
2631c87b03e5Sespie 	       We'd then like to add this region to the list of reachable
2632c87b03e5Sespie 	       handlers since it is indeed potentially reachable based on the
2633c87b03e5Sespie 	       information we have.
2634c87b03e5Sespie 
2635c87b03e5Sespie 	       Actually, this handler is for sure not reachable if all the
2636c87b03e5Sespie 	       types it matches have already been caught. That is, it is only
2637c87b03e5Sespie 	       potentially reachable if at least one of the types it catches
2638c87b03e5Sespie 	       has not been previously caught.  */
2639c87b03e5Sespie 
2640c87b03e5Sespie 	    if (! info)
2641c87b03e5Sespie 	      ret = RNL_MAYBE_CAUGHT;
2642c87b03e5Sespie 	    else
2643c87b03e5Sespie 	      {
2644c87b03e5Sespie 		tree tp_node = c->u.catch.type_list;
2645c87b03e5Sespie 		bool maybe_reachable = false;
2646c87b03e5Sespie 
2647c87b03e5Sespie 		/* Compute the potential reachability of this handler and
2648c87b03e5Sespie 		   update the list of types caught at the same time.  */
2649c87b03e5Sespie 		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2650c87b03e5Sespie 		  {
2651c87b03e5Sespie 		    tree type = TREE_VALUE (tp_node);
2652c87b03e5Sespie 
2653c87b03e5Sespie 		    if (! check_handled (info->types_caught, type))
2654c87b03e5Sespie 		      {
2655c87b03e5Sespie 			info->types_caught
2656c87b03e5Sespie 			  = tree_cons (NULL, type, info->types_caught);
2657c87b03e5Sespie 
2658c87b03e5Sespie 			maybe_reachable = true;
2659c87b03e5Sespie 		      }
2660c87b03e5Sespie 		  }
2661c87b03e5Sespie 
2662c87b03e5Sespie 		if (maybe_reachable)
2663c87b03e5Sespie 		  {
2664c87b03e5Sespie 		    add_reachable_handler (info, region, c);
2665c87b03e5Sespie 
2666c87b03e5Sespie 		    /* ??? If the catch type is a base class of every allowed
2667c87b03e5Sespie 		       type, then we know we can stop the search.  */
2668c87b03e5Sespie 		    ret = RNL_MAYBE_CAUGHT;
2669c87b03e5Sespie 		  }
2670c87b03e5Sespie 	      }
2671c87b03e5Sespie 	  }
2672c87b03e5Sespie 
2673c87b03e5Sespie 	return ret;
2674c87b03e5Sespie       }
2675c87b03e5Sespie 
2676c87b03e5Sespie     case ERT_ALLOWED_EXCEPTIONS:
2677c87b03e5Sespie       /* An empty list of types definitely ends the search.  */
2678c87b03e5Sespie       if (region->u.allowed.type_list == NULL_TREE)
2679c87b03e5Sespie 	{
2680c87b03e5Sespie 	  add_reachable_handler (info, region, region);
2681c87b03e5Sespie 	  return RNL_CAUGHT;
2682c87b03e5Sespie 	}
2683c87b03e5Sespie 
2684c87b03e5Sespie       /* Collect a list of lists of allowed types for use in detecting
2685c87b03e5Sespie 	 when a catch may be transformed into a catch-all.  */
2686c87b03e5Sespie       if (info)
2687c87b03e5Sespie 	info->types_allowed = tree_cons (NULL_TREE,
2688c87b03e5Sespie 					 region->u.allowed.type_list,
2689c87b03e5Sespie 					 info->types_allowed);
2690c87b03e5Sespie 
2691c87b03e5Sespie       /* If we have definitive information about the type hierarchy,
2692c87b03e5Sespie 	 then we can tell if the thrown type will pass through the
2693c87b03e5Sespie 	 filter.  */
2694c87b03e5Sespie       if (type_thrown && lang_eh_type_covers)
2695c87b03e5Sespie 	{
2696c87b03e5Sespie 	  if (check_handled (region->u.allowed.type_list, type_thrown))
2697c87b03e5Sespie 	    return RNL_NOT_CAUGHT;
2698c87b03e5Sespie 	  else
2699c87b03e5Sespie 	    {
2700c87b03e5Sespie 	      add_reachable_handler (info, region, region);
2701c87b03e5Sespie 	      return RNL_CAUGHT;
2702c87b03e5Sespie 	    }
2703c87b03e5Sespie 	}
2704c87b03e5Sespie 
2705c87b03e5Sespie       add_reachable_handler (info, region, region);
2706c87b03e5Sespie       return RNL_MAYBE_CAUGHT;
2707c87b03e5Sespie 
2708c87b03e5Sespie     case ERT_CATCH:
2709c87b03e5Sespie       /* Catch regions are handled by their controlling try region.  */
2710c87b03e5Sespie       return RNL_NOT_CAUGHT;
2711c87b03e5Sespie 
2712c87b03e5Sespie     case ERT_MUST_NOT_THROW:
2713c87b03e5Sespie       /* Here we end our search, since no exceptions may propagate.
2714c87b03e5Sespie 	 If we've touched down at some landing pad previously, then the
2715c87b03e5Sespie 	 explicit function call we generated may be used.  Otherwise
2716c87b03e5Sespie 	 the call is made by the runtime.  */
2717c87b03e5Sespie       if (info && info->handlers)
2718c87b03e5Sespie 	{
2719c87b03e5Sespie 	  add_reachable_handler (info, region, region);
2720c87b03e5Sespie 	  return RNL_CAUGHT;
2721c87b03e5Sespie 	}
2722c87b03e5Sespie       else
2723c87b03e5Sespie 	return RNL_BLOCKED;
2724c87b03e5Sespie 
2725c87b03e5Sespie     case ERT_THROW:
2726c87b03e5Sespie     case ERT_FIXUP:
2727c87b03e5Sespie     case ERT_UNKNOWN:
2728c87b03e5Sespie       /* Shouldn't see these here.  */
2729c87b03e5Sespie       break;
2730c87b03e5Sespie     }
2731c87b03e5Sespie 
2732c87b03e5Sespie   abort ();
2733c87b03e5Sespie }
2734c87b03e5Sespie 
2735c87b03e5Sespie /* Retrieve a list of labels of exception handlers which can be
2736c87b03e5Sespie    reached by a given insn.  */
2737c87b03e5Sespie 
2738c87b03e5Sespie rtx
2739c87b03e5Sespie reachable_handlers (insn)
2740c87b03e5Sespie      rtx insn;
2741c87b03e5Sespie {
2742c87b03e5Sespie   struct reachable_info info;
2743c87b03e5Sespie   struct eh_region *region;
2744c87b03e5Sespie   tree type_thrown;
2745c87b03e5Sespie   int region_number;
2746c87b03e5Sespie 
2747c87b03e5Sespie   if (GET_CODE (insn) == JUMP_INSN
2748c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == RESX)
2749c87b03e5Sespie     region_number = XINT (PATTERN (insn), 0);
2750c87b03e5Sespie   else
2751c87b03e5Sespie     {
2752c87b03e5Sespie       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2753c87b03e5Sespie       if (!note || INTVAL (XEXP (note, 0)) <= 0)
2754c87b03e5Sespie 	return NULL;
2755c87b03e5Sespie       region_number = INTVAL (XEXP (note, 0));
2756c87b03e5Sespie     }
2757c87b03e5Sespie 
2758c87b03e5Sespie   memset (&info, 0, sizeof (info));
2759c87b03e5Sespie 
2760c87b03e5Sespie   region = cfun->eh->region_array[region_number];
2761c87b03e5Sespie 
2762c87b03e5Sespie   type_thrown = NULL_TREE;
2763c87b03e5Sespie   if (GET_CODE (insn) == JUMP_INSN
2764c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == RESX)
2765c87b03e5Sespie     {
2766c87b03e5Sespie       /* A RESX leaves a region instead of entering it.  Thus the
2767c87b03e5Sespie 	 region itself may have been deleted out from under us.  */
2768c87b03e5Sespie       if (region == NULL)
2769c87b03e5Sespie 	return NULL;
2770c87b03e5Sespie       region = region->outer;
2771c87b03e5Sespie     }
2772c87b03e5Sespie   else if (region->type == ERT_THROW)
2773c87b03e5Sespie     {
2774c87b03e5Sespie       type_thrown = region->u.throw.type;
2775c87b03e5Sespie       region = region->outer;
2776c87b03e5Sespie     }
2777c87b03e5Sespie 
2778c87b03e5Sespie   while (region)
2779c87b03e5Sespie     {
2780c87b03e5Sespie       if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2781c87b03e5Sespie 	break;
2782c87b03e5Sespie       /* If we have processed one cleanup, there is no point in
2783c87b03e5Sespie 	 processing any more of them.  Each cleanup will have an edge
2784c87b03e5Sespie 	 to the next outer cleanup region, so the flow graph will be
2785c87b03e5Sespie 	 accurate.  */
2786c87b03e5Sespie       if (region->type == ERT_CLEANUP)
2787c87b03e5Sespie 	region = region->u.cleanup.prev_try;
2788c87b03e5Sespie       else
2789c87b03e5Sespie 	region = region->outer;
2790c87b03e5Sespie     }
2791c87b03e5Sespie 
2792c87b03e5Sespie   return info.handlers;
2793c87b03e5Sespie }
2794c87b03e5Sespie 
2795c87b03e5Sespie /* Determine if the given INSN can throw an exception that is caught
2796c87b03e5Sespie    within the function.  */
2797c87b03e5Sespie 
2798c87b03e5Sespie bool
2799c87b03e5Sespie can_throw_internal (insn)
2800c87b03e5Sespie      rtx insn;
2801c87b03e5Sespie {
2802c87b03e5Sespie   struct eh_region *region;
2803c87b03e5Sespie   tree type_thrown;
2804c87b03e5Sespie   rtx note;
2805c87b03e5Sespie 
2806c87b03e5Sespie   if (! INSN_P (insn))
2807c87b03e5Sespie     return false;
2808c87b03e5Sespie 
2809c87b03e5Sespie   if (GET_CODE (insn) == INSN
2810c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == SEQUENCE)
2811c87b03e5Sespie     insn = XVECEXP (PATTERN (insn), 0, 0);
2812c87b03e5Sespie 
2813c87b03e5Sespie   if (GET_CODE (insn) == CALL_INSN
2814c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2815c87b03e5Sespie     {
2816c87b03e5Sespie       int i;
2817c87b03e5Sespie       for (i = 0; i < 3; ++i)
2818c87b03e5Sespie 	{
2819c87b03e5Sespie 	  rtx sub = XEXP (PATTERN (insn), i);
2820c87b03e5Sespie 	  for (; sub ; sub = NEXT_INSN (sub))
2821c87b03e5Sespie 	    if (can_throw_internal (sub))
2822c87b03e5Sespie 	      return true;
2823c87b03e5Sespie 	}
2824c87b03e5Sespie       return false;
2825c87b03e5Sespie     }
2826c87b03e5Sespie 
2827c87b03e5Sespie   /* Every insn that might throw has an EH_REGION note.  */
2828c87b03e5Sespie   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2829c87b03e5Sespie   if (!note || INTVAL (XEXP (note, 0)) <= 0)
2830c87b03e5Sespie     return false;
2831c87b03e5Sespie 
2832c87b03e5Sespie   region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2833c87b03e5Sespie 
2834c87b03e5Sespie   type_thrown = NULL_TREE;
2835c87b03e5Sespie   if (region->type == ERT_THROW)
2836c87b03e5Sespie     {
2837c87b03e5Sespie       type_thrown = region->u.throw.type;
2838c87b03e5Sespie       region = region->outer;
2839c87b03e5Sespie     }
2840c87b03e5Sespie 
2841c87b03e5Sespie   /* If this exception is ignored by each and every containing region,
2842c87b03e5Sespie      then control passes straight out.  The runtime may handle some
2843c87b03e5Sespie      regions, which also do not require processing internally.  */
2844c87b03e5Sespie   for (; region; region = region->outer)
2845c87b03e5Sespie     {
2846c87b03e5Sespie       enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2847c87b03e5Sespie       if (how == RNL_BLOCKED)
2848c87b03e5Sespie 	return false;
2849c87b03e5Sespie       if (how != RNL_NOT_CAUGHT)
2850c87b03e5Sespie 	return true;
2851c87b03e5Sespie     }
2852c87b03e5Sespie 
2853c87b03e5Sespie   return false;
2854c87b03e5Sespie }
2855c87b03e5Sespie 
2856c87b03e5Sespie /* Determine if the given INSN can throw an exception that is
2857c87b03e5Sespie    visible outside the function.  */
2858c87b03e5Sespie 
2859c87b03e5Sespie bool
2860c87b03e5Sespie can_throw_external (insn)
2861c87b03e5Sespie      rtx insn;
2862c87b03e5Sespie {
2863c87b03e5Sespie   struct eh_region *region;
2864c87b03e5Sespie   tree type_thrown;
2865c87b03e5Sespie   rtx note;
2866c87b03e5Sespie 
2867c87b03e5Sespie   if (! INSN_P (insn))
2868c87b03e5Sespie     return false;
2869c87b03e5Sespie 
2870c87b03e5Sespie   if (GET_CODE (insn) == INSN
2871c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == SEQUENCE)
2872c87b03e5Sespie     insn = XVECEXP (PATTERN (insn), 0, 0);
2873c87b03e5Sespie 
2874c87b03e5Sespie   if (GET_CODE (insn) == CALL_INSN
2875c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2876c87b03e5Sespie     {
2877c87b03e5Sespie       int i;
2878c87b03e5Sespie       for (i = 0; i < 3; ++i)
2879c87b03e5Sespie 	{
2880c87b03e5Sespie 	  rtx sub = XEXP (PATTERN (insn), i);
2881c87b03e5Sespie 	  for (; sub ; sub = NEXT_INSN (sub))
2882c87b03e5Sespie 	    if (can_throw_external (sub))
2883c87b03e5Sespie 	      return true;
2884c87b03e5Sespie 	}
2885c87b03e5Sespie       return false;
2886c87b03e5Sespie     }
2887c87b03e5Sespie 
2888c87b03e5Sespie   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2889c87b03e5Sespie   if (!note)
2890c87b03e5Sespie     {
2891c87b03e5Sespie       /* Calls (and trapping insns) without notes are outside any
2892c87b03e5Sespie 	 exception handling region in this function.  We have to
2893c87b03e5Sespie 	 assume it might throw.  Given that the front end and middle
2894c87b03e5Sespie 	 ends mark known NOTHROW functions, this isn't so wildly
2895c87b03e5Sespie 	 inaccurate.  */
2896c87b03e5Sespie       return (GET_CODE (insn) == CALL_INSN
2897c87b03e5Sespie 	      || (flag_non_call_exceptions
2898c87b03e5Sespie 		  && may_trap_p (PATTERN (insn))));
2899c87b03e5Sespie     }
2900c87b03e5Sespie   if (INTVAL (XEXP (note, 0)) <= 0)
2901c87b03e5Sespie     return false;
2902c87b03e5Sespie 
2903c87b03e5Sespie   region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2904c87b03e5Sespie 
2905c87b03e5Sespie   type_thrown = NULL_TREE;
2906c87b03e5Sespie   if (region->type == ERT_THROW)
2907c87b03e5Sespie     {
2908c87b03e5Sespie       type_thrown = region->u.throw.type;
2909c87b03e5Sespie       region = region->outer;
2910c87b03e5Sespie     }
2911c87b03e5Sespie 
2912c87b03e5Sespie   /* If the exception is caught or blocked by any containing region,
2913c87b03e5Sespie      then it is not seen by any calling function.  */
2914c87b03e5Sespie   for (; region ; region = region->outer)
2915c87b03e5Sespie     if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2916c87b03e5Sespie       return false;
2917c87b03e5Sespie 
2918c87b03e5Sespie   return true;
2919c87b03e5Sespie }
2920c87b03e5Sespie 
2921c87b03e5Sespie /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls.  */
2922c87b03e5Sespie 
2923c87b03e5Sespie void
2924c87b03e5Sespie set_nothrow_function_flags ()
2925c87b03e5Sespie {
2926c87b03e5Sespie   rtx insn;
2927c87b03e5Sespie 
2928c87b03e5Sespie   current_function_nothrow = 1;
2929c87b03e5Sespie 
2930c87b03e5Sespie   /* Assume cfun->all_throwers_are_sibcalls until we encounter
2931c87b03e5Sespie      something that can throw an exception.  We specifically exempt
2932c87b03e5Sespie      CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2933c87b03e5Sespie      and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
2934c87b03e5Sespie      is optimistic.  */
2935c87b03e5Sespie 
2936c87b03e5Sespie   cfun->all_throwers_are_sibcalls = 1;
2937c87b03e5Sespie 
2938c87b03e5Sespie   if (! flag_exceptions)
2939c87b03e5Sespie     return;
2940c87b03e5Sespie 
2941c87b03e5Sespie   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2942c87b03e5Sespie     if (can_throw_external (insn))
2943c87b03e5Sespie       {
2944c87b03e5Sespie 	current_function_nothrow = 0;
2945c87b03e5Sespie 
2946c87b03e5Sespie 	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2947c87b03e5Sespie 	  {
2948c87b03e5Sespie 	    cfun->all_throwers_are_sibcalls = 0;
2949c87b03e5Sespie 	    return;
2950c87b03e5Sespie 	  }
2951c87b03e5Sespie       }
2952c87b03e5Sespie 
2953c87b03e5Sespie   for (insn = current_function_epilogue_delay_list; insn;
2954c87b03e5Sespie        insn = XEXP (insn, 1))
2955c87b03e5Sespie     if (can_throw_external (insn))
2956c87b03e5Sespie       {
2957c87b03e5Sespie 	current_function_nothrow = 0;
2958c87b03e5Sespie 
2959c87b03e5Sespie 	if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2960c87b03e5Sespie 	  {
2961c87b03e5Sespie 	    cfun->all_throwers_are_sibcalls = 0;
2962c87b03e5Sespie 	    return;
2963c87b03e5Sespie 	  }
2964c87b03e5Sespie       }
2965c87b03e5Sespie }
2966c87b03e5Sespie 
2967c87b03e5Sespie 
2968c87b03e5Sespie /* Various hooks for unwind library.  */
2969c87b03e5Sespie 
2970c87b03e5Sespie /* Do any necessary initialization to access arbitrary stack frames.
2971c87b03e5Sespie    On the SPARC, this means flushing the register windows.  */
2972c87b03e5Sespie 
2973c87b03e5Sespie void
2974c87b03e5Sespie expand_builtin_unwind_init ()
2975c87b03e5Sespie {
2976c87b03e5Sespie   /* Set this so all the registers get saved in our frame; we need to be
2977c87b03e5Sespie      able to copy the saved values for any registers from frames we unwind.  */
2978c87b03e5Sespie   current_function_has_nonlocal_label = 1;
2979c87b03e5Sespie 
2980c87b03e5Sespie #ifdef SETUP_FRAME_ADDRESSES
2981c87b03e5Sespie   SETUP_FRAME_ADDRESSES ();
2982c87b03e5Sespie #endif
2983c87b03e5Sespie }
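
/* Illustrative usage sketch, not part of the original source.  The
   builtin that expands through expand_builtin_unwind_init above is
   typically called by code that needs every call-saved register
   spilled into the current frame, e.g. a conservative stack scanner
   (the function name below is hypothetical):

	void
	flush_call_saved_registers ()
	{
	  __builtin_unwind_init ();
	}

   After the builtin, the saved values of the call-saved registers can
   be recovered from the frames being unwound.  */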
2984c87b03e5Sespie 
2985c87b03e5Sespie rtx
2986c87b03e5Sespie expand_builtin_eh_return_data_regno (arglist)
2987c87b03e5Sespie      tree arglist;
2988c87b03e5Sespie {
2989c87b03e5Sespie   tree which = TREE_VALUE (arglist);
2990c87b03e5Sespie   unsigned HOST_WIDE_INT iwhich;
2991c87b03e5Sespie 
2992c87b03e5Sespie   if (TREE_CODE (which) != INTEGER_CST)
2993c87b03e5Sespie     {
2994c87b03e5Sespie       error ("argument of `__builtin_eh_return_regno' must be constant");
2995c87b03e5Sespie       return constm1_rtx;
2996c87b03e5Sespie     }
2997c87b03e5Sespie 
2998c87b03e5Sespie   iwhich = tree_low_cst (which, 1);
2999c87b03e5Sespie   iwhich = EH_RETURN_DATA_REGNO (iwhich);
3000c87b03e5Sespie   if (iwhich == INVALID_REGNUM)
3001c87b03e5Sespie     return constm1_rtx;
3002c87b03e5Sespie 
3003c87b03e5Sespie #ifdef DWARF_FRAME_REGNUM
3004c87b03e5Sespie   iwhich = DWARF_FRAME_REGNUM (iwhich);
3005c87b03e5Sespie #else
3006c87b03e5Sespie   iwhich = DBX_REGISTER_NUMBER (iwhich);
3007c87b03e5Sespie #endif
3008c87b03e5Sespie 
3009c87b03e5Sespie   return GEN_INT (iwhich);
3010c87b03e5Sespie }
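
/* Illustrative usage sketch, not part of the original source.  A
   personality routine hands data to its landing pad through the EH
   data registers named by this builtin; a typical (hypothetical)
   fragment from such a routine looks like:

	_Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		       (_Unwind_Ptr) exception_object);
	_Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
		       handler_switch_value);

   The argument must be a constant, as enforced above, and the result
   has already been mapped through EH_RETURN_DATA_REGNO and the DWARF
   column numbering.  */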
3011c87b03e5Sespie 
3012c87b03e5Sespie /* Given a value extracted from the return address register or stack slot,
3013c87b03e5Sespie    return the actual address encoded in that value.  */
3014c87b03e5Sespie 
3015c87b03e5Sespie rtx
3016c87b03e5Sespie expand_builtin_extract_return_addr (addr_tree)
3017c87b03e5Sespie      tree addr_tree;
3018c87b03e5Sespie {
3019c87b03e5Sespie   rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3020c87b03e5Sespie 
3021c87b03e5Sespie   if (GET_MODE (addr) != Pmode
3022c87b03e5Sespie       && GET_MODE (addr) != VOIDmode)
3023c87b03e5Sespie     {
3024c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
3025c87b03e5Sespie       addr = convert_memory_address (Pmode, addr);
3026c87b03e5Sespie #else
3027c87b03e5Sespie       addr = convert_to_mode (Pmode, addr, 0);
3028c87b03e5Sespie #endif
3029c87b03e5Sespie     }
3030c87b03e5Sespie 
3031c87b03e5Sespie   /* First mask out any unwanted bits.  */
3032c87b03e5Sespie #ifdef MASK_RETURN_ADDR
3033c87b03e5Sespie   expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3034c87b03e5Sespie #endif
3035c87b03e5Sespie 
3036c87b03e5Sespie   /* Then adjust to find the real return address.  */
3037c87b03e5Sespie #if defined (RETURN_ADDR_OFFSET)
3038c87b03e5Sespie   addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3039c87b03e5Sespie #endif
3040c87b03e5Sespie 
3041c87b03e5Sespie   return addr;
3042c87b03e5Sespie }
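
/* Illustrative usage sketch, not part of the original source.  The
   usual pairing strips any target encoding from a raw return address:

	void *pc
	  = __builtin_extract_return_addr (__builtin_return_address (0));

   On targets that define neither MASK_RETURN_ADDR nor
   RETURN_ADDR_OFFSET this is effectively an identity operation, as
   the code above shows.  */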
3043c87b03e5Sespie 
3044c87b03e5Sespie /* Given an actual address in addr_tree, do any necessary encoding
3045c87b03e5Sespie    and return the value to be stored in the return address register or
3046c87b03e5Sespie    stack slot so the epilogue will return to that address.  */
3047c87b03e5Sespie 
3048c87b03e5Sespie rtx
3049c87b03e5Sespie expand_builtin_frob_return_addr (addr_tree)
3050c87b03e5Sespie      tree addr_tree;
3051c87b03e5Sespie {
3052c87b03e5Sespie   rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3053c87b03e5Sespie 
3054c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
3055c87b03e5Sespie   if (GET_MODE (addr) != Pmode)
3056c87b03e5Sespie     addr = convert_memory_address (Pmode, addr);
3057c87b03e5Sespie #endif
3058c87b03e5Sespie 
3059c87b03e5Sespie #ifdef RETURN_ADDR_OFFSET
3060c87b03e5Sespie   addr = force_reg (Pmode, addr);
3061c87b03e5Sespie   addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3062c87b03e5Sespie #endif
3063c87b03e5Sespie 
3064c87b03e5Sespie   return addr;
3065c87b03e5Sespie }
3066c87b03e5Sespie 
3067c87b03e5Sespie /* Set up the epilogue with the magic bits we'll need to return to the
3068c87b03e5Sespie    exception handler.  */
3069c87b03e5Sespie 
3070c87b03e5Sespie void
3071c87b03e5Sespie expand_builtin_eh_return (stackadj_tree, handler_tree)
3072c87b03e5Sespie     tree stackadj_tree ATTRIBUTE_UNUSED;
3073c87b03e5Sespie     tree handler_tree;
3074c87b03e5Sespie {
3075c87b03e5Sespie   rtx tmp;
3076c87b03e5Sespie 
3077c87b03e5Sespie #ifdef EH_RETURN_STACKADJ_RTX
3078c87b03e5Sespie   tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3079c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
3080c87b03e5Sespie   if (GET_MODE (tmp) != Pmode)
3081c87b03e5Sespie     tmp = convert_memory_address (Pmode, tmp);
3082c87b03e5Sespie #endif
3083c87b03e5Sespie   if (!cfun->eh->ehr_stackadj)
3084c87b03e5Sespie     cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3085c87b03e5Sespie   else if (tmp != cfun->eh->ehr_stackadj)
3086c87b03e5Sespie     emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3087c87b03e5Sespie #endif
3088c87b03e5Sespie 
3089c87b03e5Sespie   tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3090c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
3091c87b03e5Sespie   if (GET_MODE (tmp) != Pmode)
3092c87b03e5Sespie     tmp = convert_memory_address (Pmode, tmp);
3093c87b03e5Sespie #endif
3094c87b03e5Sespie   if (!cfun->eh->ehr_handler)
3095c87b03e5Sespie     cfun->eh->ehr_handler = copy_to_reg (tmp);
3096c87b03e5Sespie   else if (tmp != cfun->eh->ehr_handler)
3097c87b03e5Sespie     emit_move_insn (cfun->eh->ehr_handler, tmp);
3098c87b03e5Sespie 
3099c87b03e5Sespie   if (!cfun->eh->ehr_label)
3100c87b03e5Sespie     cfun->eh->ehr_label = gen_label_rtx ();
3101c87b03e5Sespie   emit_jump (cfun->eh->ehr_label);
3102c87b03e5Sespie }
3103c87b03e5Sespie 
3104c87b03e5Sespie void
3105c87b03e5Sespie expand_eh_return ()
3106c87b03e5Sespie {
3107c87b03e5Sespie   rtx around_label;
3108c87b03e5Sespie 
3109c87b03e5Sespie   if (! cfun->eh->ehr_label)
3110c87b03e5Sespie     return;
3111c87b03e5Sespie 
3112c87b03e5Sespie   current_function_calls_eh_return = 1;
3113c87b03e5Sespie 
3114c87b03e5Sespie #ifdef EH_RETURN_STACKADJ_RTX
3115c87b03e5Sespie   emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3116c87b03e5Sespie #endif
3117c87b03e5Sespie 
3118c87b03e5Sespie   around_label = gen_label_rtx ();
3119c87b03e5Sespie   emit_jump (around_label);
3120c87b03e5Sespie 
3121c87b03e5Sespie   emit_label (cfun->eh->ehr_label);
3122c87b03e5Sespie   clobber_return_register ();
3123c87b03e5Sespie 
3124c87b03e5Sespie #ifdef EH_RETURN_STACKADJ_RTX
3125c87b03e5Sespie   emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3126c87b03e5Sespie #endif
3127c87b03e5Sespie 
3128c87b03e5Sespie #ifdef HAVE_eh_return
3129c87b03e5Sespie   if (HAVE_eh_return)
3130c87b03e5Sespie     emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3131c87b03e5Sespie   else
3132c87b03e5Sespie #endif
3133c87b03e5Sespie     {
3134c87b03e5Sespie #ifdef EH_RETURN_HANDLER_RTX
3135c87b03e5Sespie       emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3136c87b03e5Sespie #else
3137c87b03e5Sespie       error ("__builtin_eh_return not supported on this target");
3138c87b03e5Sespie #endif
3139c87b03e5Sespie     }
3140c87b03e5Sespie 
3141c87b03e5Sespie   emit_label (around_label);
3142c87b03e5Sespie }
3143c87b03e5Sespie 
3144c87b03e5Sespie /* In the following functions, we represent entries in the action table
3145c87b03e5Sespie    as 1-based indices.  Special cases are:
3146c87b03e5Sespie 
3147c87b03e5Sespie 	 0:	null action record, non-null landing pad; implies cleanups
3148c87b03e5Sespie 	-1:	null action record, null landing pad; implies no action
3149c87b03e5Sespie 	-2:	no call-site entry; implies must_not_throw
3150c87b03e5Sespie 	-3:	we have yet to process outer regions
3151c87b03e5Sespie 
3152c87b03e5Sespie    Further, no special cases apply to the "next" field of the record.
3153c87b03e5Sespie    For next, 0 means end of list.  */
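
/* Illustrative example, not part of the original source.  For a call
   inside "try { ... } catch (A) { } catch (B) { }" with no enclosing
   regions, collect_one_action_chain below emits two chained records,

	record for B:  filter_B, next = 0	(end of list)
	record for A:  filter_A, next -> record for B

   and returns the 1-based offset of the record for A, so the runtime
   tests A before B.  A call covered only by cleanups instead gets the
   special action 0, and a call covered by no region at all gets -1.  */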
3154c87b03e5Sespie 
3155c87b03e5Sespie struct action_record
3156c87b03e5Sespie {
3157c87b03e5Sespie   int offset;
3158c87b03e5Sespie   int filter;
3159c87b03e5Sespie   int next;
3160c87b03e5Sespie };
3161c87b03e5Sespie 
3162c87b03e5Sespie static int
3163c87b03e5Sespie action_record_eq (pentry, pdata)
3164c87b03e5Sespie      const PTR pentry;
3165c87b03e5Sespie      const PTR pdata;
3166c87b03e5Sespie {
3167c87b03e5Sespie   const struct action_record *entry = (const struct action_record *) pentry;
3168c87b03e5Sespie   const struct action_record *data = (const struct action_record *) pdata;
3169c87b03e5Sespie   return entry->filter == data->filter && entry->next == data->next;
3170c87b03e5Sespie }
3171c87b03e5Sespie 
3172c87b03e5Sespie static hashval_t
3173c87b03e5Sespie action_record_hash (pentry)
3174c87b03e5Sespie      const PTR pentry;
3175c87b03e5Sespie {
3176c87b03e5Sespie   const struct action_record *entry = (const struct action_record *) pentry;
3177c87b03e5Sespie   return entry->next * 1009 + entry->filter;
3178c87b03e5Sespie }
3179c87b03e5Sespie 
3180c87b03e5Sespie static int
3181c87b03e5Sespie add_action_record (ar_hash, filter, next)
3182c87b03e5Sespie      htab_t ar_hash;
3183c87b03e5Sespie      int filter, next;
3184c87b03e5Sespie {
3185c87b03e5Sespie   struct action_record **slot, *new, tmp;
3186c87b03e5Sespie 
3187c87b03e5Sespie   tmp.filter = filter;
3188c87b03e5Sespie   tmp.next = next;
3189c87b03e5Sespie   slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3190c87b03e5Sespie 
3191c87b03e5Sespie   if ((new = *slot) == NULL)
3192c87b03e5Sespie     {
3193c87b03e5Sespie       new = (struct action_record *) xmalloc (sizeof (*new));
3194c87b03e5Sespie       new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3195c87b03e5Sespie       new->filter = filter;
3196c87b03e5Sespie       new->next = next;
3197c87b03e5Sespie       *slot = new;
3198c87b03e5Sespie 
3199c87b03e5Sespie       /* The filter value goes in untouched.  The link to the next
3200c87b03e5Sespie 	 record is a "self-relative" byte offset, or zero to indicate
3201c87b03e5Sespie 	 that there is no next record.  So convert the absolute 1-based
3202c87b03e5Sespie 	 indices we've been carrying around into a displacement.  */
3203c87b03e5Sespie 
3204c87b03e5Sespie       push_sleb128 (&cfun->eh->action_record_data, filter);
3205c87b03e5Sespie       if (next)
3206c87b03e5Sespie 	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3207c87b03e5Sespie       push_sleb128 (&cfun->eh->action_record_data, next);
3208c87b03e5Sespie     }
3209c87b03e5Sespie 
3210c87b03e5Sespie   return new->offset;
3211c87b03e5Sespie }
3212c87b03e5Sespie 
3213c87b03e5Sespie static int
3214c87b03e5Sespie collect_one_action_chain (ar_hash, region)
3215c87b03e5Sespie      htab_t ar_hash;
3216c87b03e5Sespie      struct eh_region *region;
3217c87b03e5Sespie {
3218c87b03e5Sespie   struct eh_region *c;
3219c87b03e5Sespie   int next;
3220c87b03e5Sespie 
3221c87b03e5Sespie   /* If we've reached the top of the region chain, then we have
3222c87b03e5Sespie      no actions, and require no landing pad.  */
3223c87b03e5Sespie   if (region == NULL)
3224c87b03e5Sespie     return -1;
3225c87b03e5Sespie 
3226c87b03e5Sespie   switch (region->type)
3227c87b03e5Sespie     {
3228c87b03e5Sespie     case ERT_CLEANUP:
3229c87b03e5Sespie       /* A cleanup adds a zero filter to the beginning of the chain, but
3230c87b03e5Sespie 	 there are special cases to look out for.  If there are *only*
3231c87b03e5Sespie 	 cleanups along a path, then it compresses to a zero action.
3232c87b03e5Sespie 	 Further, if there are multiple cleanups along a path, we only
3233c87b03e5Sespie 	 need to represent one of them, as that is enough to trigger
3234c87b03e5Sespie 	 entry to the landing pad at runtime.  */
3235c87b03e5Sespie       next = collect_one_action_chain (ar_hash, region->outer);
3236c87b03e5Sespie       if (next <= 0)
3237c87b03e5Sespie 	return 0;
3238c87b03e5Sespie       for (c = region->outer; c ; c = c->outer)
3239c87b03e5Sespie 	if (c->type == ERT_CLEANUP)
3240c87b03e5Sespie 	  return next;
3241c87b03e5Sespie       return add_action_record (ar_hash, 0, next);
3242c87b03e5Sespie 
3243c87b03e5Sespie     case ERT_TRY:
3244c87b03e5Sespie       /* Process the associated catch regions in reverse order.
3245c87b03e5Sespie 	 If there's a catch-all handler, then we don't need to
3246c87b03e5Sespie 	 search outer regions.  Use a magic -3 value to record
3247c87b03e5Sespie 	 that we haven't done the outer search.  */
3248c87b03e5Sespie       next = -3;
3249c87b03e5Sespie       for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3250c87b03e5Sespie 	{
3251c87b03e5Sespie 	  if (c->u.catch.type_list == NULL)
3252c87b03e5Sespie 	    {
3253c87b03e5Sespie 	      /* Retrieve the filter from the head of the filter list
3254c87b03e5Sespie 		 where we have stored it (see assign_filter_values).  */
3255c87b03e5Sespie 	      int filter
3256c87b03e5Sespie 		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3257c87b03e5Sespie 
3258c87b03e5Sespie 	      next = add_action_record (ar_hash, filter, 0);
3259c87b03e5Sespie 	    }
3260c87b03e5Sespie 	  else
3261c87b03e5Sespie 	    {
3262c87b03e5Sespie 	      /* Once the outer search is done, trigger an action record for
3263c87b03e5Sespie                  each filter we have.  */
3264c87b03e5Sespie 	      tree flt_node;
3265c87b03e5Sespie 
3266c87b03e5Sespie 	      if (next == -3)
3267c87b03e5Sespie 		{
3268c87b03e5Sespie 		  next = collect_one_action_chain (ar_hash, region->outer);
3269c87b03e5Sespie 
3270c87b03e5Sespie 		  /* If there is no next action, terminate the chain.  */
3271c87b03e5Sespie 		  if (next == -1)
3272c87b03e5Sespie 		    next = 0;
3273c87b03e5Sespie 		  /* If all outer actions are cleanups or must_not_throw,
3274c87b03e5Sespie 		     we'll have no action record for it, since we had wanted
3275c87b03e5Sespie 		     to encode these states in the call-site record directly.
3276c87b03e5Sespie 		     Add a cleanup action to the chain to catch these.  */
3277c87b03e5Sespie 		  else if (next <= 0)
3278c87b03e5Sespie 		    next = add_action_record (ar_hash, 0, 0);
3279c87b03e5Sespie 		}
3280c87b03e5Sespie 
3281c87b03e5Sespie 	      flt_node = c->u.catch.filter_list;
3282c87b03e5Sespie 	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3283c87b03e5Sespie 		{
3284c87b03e5Sespie 		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3285c87b03e5Sespie 		  next = add_action_record (ar_hash, filter, next);
3286c87b03e5Sespie 		}
3287c87b03e5Sespie 	    }
3288c87b03e5Sespie 	}
3289c87b03e5Sespie       return next;
3290c87b03e5Sespie 
3291c87b03e5Sespie     case ERT_ALLOWED_EXCEPTIONS:
3292c87b03e5Sespie       /* An exception specification adds its filter to the
3293c87b03e5Sespie 	 beginning of the chain.  */
3294c87b03e5Sespie       next = collect_one_action_chain (ar_hash, region->outer);
3295*4e43c760Sespie 
3296*4e43c760Sespie       /* If there is no next action, terminate the chain.  */
3297*4e43c760Sespie       if (next == -1)
3298*4e43c760Sespie 	next = 0;
3299*4e43c760Sespie       /* If all outer actions are cleanups or must_not_throw,
3300*4e43c760Sespie 	 we'll have no action record for it, since we had wanted
3301*4e43c760Sespie 	 to encode these states in the call-site record directly.
3302*4e43c760Sespie 	 Add a cleanup action to the chain to catch these.  */
3303*4e43c760Sespie       else if (next <= 0)
3304*4e43c760Sespie 	next = add_action_record (ar_hash, 0, 0);
3305*4e43c760Sespie 
3306*4e43c760Sespie       return add_action_record (ar_hash, region->u.allowed.filter, next);
3307c87b03e5Sespie 
3308c87b03e5Sespie     case ERT_MUST_NOT_THROW:
3309c87b03e5Sespie       /* A must-not-throw region with no inner handlers or cleanups
3310c87b03e5Sespie 	 requires no call-site entry.  Note that this differs from
3311c87b03e5Sespie 	 the no handler or cleanup case in that we do require an lsda
3312c87b03e5Sespie 	 to be generated.  Return a magic -2 value to record this.  */
3313c87b03e5Sespie       return -2;
3314c87b03e5Sespie 
3315c87b03e5Sespie     case ERT_CATCH:
3316c87b03e5Sespie     case ERT_THROW:
3317c87b03e5Sespie       /* CATCH regions are handled in TRY above.  THROW regions are
3318c87b03e5Sespie 	 for optimization information only and produce no output.  */
3319c87b03e5Sespie       return collect_one_action_chain (ar_hash, region->outer);
3320c87b03e5Sespie 
3321c87b03e5Sespie     default:
3322c87b03e5Sespie       abort ();
3323c87b03e5Sespie     }
3324c87b03e5Sespie }
3325c87b03e5Sespie 
3326c87b03e5Sespie static int
3327c87b03e5Sespie add_call_site (landing_pad, action)
3328c87b03e5Sespie      rtx landing_pad;
3329c87b03e5Sespie      int action;
3330c87b03e5Sespie {
3331c87b03e5Sespie   struct call_site_record *data = cfun->eh->call_site_data;
3332c87b03e5Sespie   int used = cfun->eh->call_site_data_used;
3333c87b03e5Sespie   int size = cfun->eh->call_site_data_size;
3334c87b03e5Sespie 
3335c87b03e5Sespie   if (used >= size)
3336c87b03e5Sespie     {
3337c87b03e5Sespie       size = (size ? size * 2 : 64);
3338c87b03e5Sespie       data = (struct call_site_record *)
3339c87b03e5Sespie 	ggc_realloc (data, sizeof (*data) * size);
3340c87b03e5Sespie       cfun->eh->call_site_data = data;
3341c87b03e5Sespie       cfun->eh->call_site_data_size = size;
3342c87b03e5Sespie     }
3343c87b03e5Sespie 
3344c87b03e5Sespie   data[used].landing_pad = landing_pad;
3345c87b03e5Sespie   data[used].action = action;
3346c87b03e5Sespie 
3347c87b03e5Sespie   cfun->eh->call_site_data_used = used + 1;
3348c87b03e5Sespie 
3349c87b03e5Sespie   return used + call_site_base;
3350c87b03e5Sespie }
3351c87b03e5Sespie 
3352c87b03e5Sespie /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3353c87b03e5Sespie    The new note numbers will not refer to region numbers, but
3354c87b03e5Sespie    instead to call site entries.  */
3355c87b03e5Sespie 
3356c87b03e5Sespie void
3357c87b03e5Sespie convert_to_eh_region_ranges ()
3358c87b03e5Sespie {
3359c87b03e5Sespie   rtx insn, iter, note;
3360c87b03e5Sespie   htab_t ar_hash;
3361c87b03e5Sespie   int last_action = -3;
3362c87b03e5Sespie   rtx last_action_insn = NULL_RTX;
3363c87b03e5Sespie   rtx last_landing_pad = NULL_RTX;
3364c87b03e5Sespie   rtx first_no_action_insn = NULL_RTX;
3365c87b03e5Sespie   int call_site = 0;
3366c87b03e5Sespie 
3367c87b03e5Sespie   if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3368c87b03e5Sespie     return;
3369c87b03e5Sespie 
3370c87b03e5Sespie   VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3371c87b03e5Sespie 
3372c87b03e5Sespie   ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3373c87b03e5Sespie 
3374c87b03e5Sespie   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3375c87b03e5Sespie     if (INSN_P (iter))
3376c87b03e5Sespie       {
3377c87b03e5Sespie 	struct eh_region *region;
3378c87b03e5Sespie 	int this_action;
3379c87b03e5Sespie 	rtx this_landing_pad;
3380c87b03e5Sespie 
3381c87b03e5Sespie 	insn = iter;
3382c87b03e5Sespie 	if (GET_CODE (insn) == INSN
3383c87b03e5Sespie 	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
3384c87b03e5Sespie 	  insn = XVECEXP (PATTERN (insn), 0, 0);
3385c87b03e5Sespie 
3386c87b03e5Sespie 	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3387c87b03e5Sespie 	if (!note)
3388c87b03e5Sespie 	  {
3389c87b03e5Sespie 	    if (! (GET_CODE (insn) == CALL_INSN
3390c87b03e5Sespie 		   || (flag_non_call_exceptions
3391c87b03e5Sespie 		       && may_trap_p (PATTERN (insn)))))
3392c87b03e5Sespie 	      continue;
3393c87b03e5Sespie 	    this_action = -1;
3394c87b03e5Sespie 	    region = NULL;
3395c87b03e5Sespie 	  }
3396c87b03e5Sespie 	else
3397c87b03e5Sespie 	  {
3398c87b03e5Sespie 	    if (INTVAL (XEXP (note, 0)) <= 0)
3399c87b03e5Sespie 	      continue;
3400c87b03e5Sespie 	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3401c87b03e5Sespie 	    this_action = collect_one_action_chain (ar_hash, region);
3402c87b03e5Sespie 	  }
3403c87b03e5Sespie 
3404c87b03e5Sespie 	/* Existence of catch handlers, or must-not-throw regions
3405c87b03e5Sespie 	   implies that an lsda is needed (even if empty).  */
3406c87b03e5Sespie 	if (this_action != -1)
3407c87b03e5Sespie 	  cfun->uses_eh_lsda = 1;
3408c87b03e5Sespie 
3409c87b03e5Sespie 	/* Delay creation of region notes for no-action regions
3410c87b03e5Sespie 	   until we're sure that an lsda will be required.  */
3411c87b03e5Sespie 	else if (last_action == -3)
3412c87b03e5Sespie 	  {
3413c87b03e5Sespie 	    first_no_action_insn = iter;
3414c87b03e5Sespie 	    last_action = -1;
3415c87b03e5Sespie 	  }
3416c87b03e5Sespie 
3417c87b03e5Sespie 	/* Cleanups and handlers may share action chains but not
3418c87b03e5Sespie 	   landing pads.  Collect the landing pad for this region.  */
3419c87b03e5Sespie 	if (this_action >= 0)
3420c87b03e5Sespie 	  {
3421c87b03e5Sespie 	    struct eh_region *o;
3422c87b03e5Sespie 	    for (o = region; ! o->landing_pad ; o = o->outer)
3423c87b03e5Sespie 	      continue;
3424c87b03e5Sespie 	    this_landing_pad = o->landing_pad;
3425c87b03e5Sespie 	  }
3426c87b03e5Sespie 	else
3427c87b03e5Sespie 	  this_landing_pad = NULL_RTX;
3428c87b03e5Sespie 
3429c87b03e5Sespie 	/* Differing actions or landing pads implies a change in call-site
3430c87b03e5Sespie 	   info, which implies some EH_REGION note should be emitted.  */
3431c87b03e5Sespie 	if (last_action != this_action
3432c87b03e5Sespie 	    || last_landing_pad != this_landing_pad)
3433c87b03e5Sespie 	  {
3434c87b03e5Sespie 	    /* If we'd not seen a previous action (-3) or the previous
3435c87b03e5Sespie 	       action was must-not-throw (-2), then we do not need an
3436c87b03e5Sespie 	       end note.  */
3437c87b03e5Sespie 	    if (last_action >= -1)
3438c87b03e5Sespie 	      {
3439c87b03e5Sespie 		/* If we delayed the creation of the begin, do it now.  */
3440c87b03e5Sespie 		if (first_no_action_insn)
3441c87b03e5Sespie 		  {
3442c87b03e5Sespie 		    call_site = add_call_site (NULL_RTX, 0);
3443c87b03e5Sespie 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3444c87b03e5Sespie 					     first_no_action_insn);
3445c87b03e5Sespie 		    NOTE_EH_HANDLER (note) = call_site;
3446c87b03e5Sespie 		    first_no_action_insn = NULL_RTX;
3447c87b03e5Sespie 		  }
3448c87b03e5Sespie 
3449c87b03e5Sespie 		note = emit_note_after (NOTE_INSN_EH_REGION_END,
3450c87b03e5Sespie 					last_action_insn);
3451c87b03e5Sespie 		NOTE_EH_HANDLER (note) = call_site;
3452c87b03e5Sespie 	      }
3453c87b03e5Sespie 
3454c87b03e5Sespie 	    /* If the new action is must-not-throw, then no region notes
3455c87b03e5Sespie 	       are created.  */
3456c87b03e5Sespie 	    if (this_action >= -1)
3457c87b03e5Sespie 	      {
3458c87b03e5Sespie 		call_site = add_call_site (this_landing_pad,
3459c87b03e5Sespie 					   this_action < 0 ? 0 : this_action);
3460c87b03e5Sespie 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3461c87b03e5Sespie 		NOTE_EH_HANDLER (note) = call_site;
3462c87b03e5Sespie 	      }
3463c87b03e5Sespie 
3464c87b03e5Sespie 	    last_action = this_action;
3465c87b03e5Sespie 	    last_landing_pad = this_landing_pad;
3466c87b03e5Sespie 	  }
3467c87b03e5Sespie 	last_action_insn = iter;
3468c87b03e5Sespie       }
3469c87b03e5Sespie 
3470c87b03e5Sespie   if (last_action >= -1 && ! first_no_action_insn)
3471c87b03e5Sespie     {
3472c87b03e5Sespie       note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3473c87b03e5Sespie       NOTE_EH_HANDLER (note) = call_site;
3474c87b03e5Sespie     }
3475c87b03e5Sespie 
3476c87b03e5Sespie   htab_delete (ar_hash);
3477c87b03e5Sespie }
3478c87b03e5Sespie 
3479c87b03e5Sespie 
3480c87b03e5Sespie static void
3481c87b03e5Sespie push_uleb128 (data_area, value)
3482c87b03e5Sespie      varray_type *data_area;
3483c87b03e5Sespie      unsigned int value;
3484c87b03e5Sespie {
3485c87b03e5Sespie   do
3486c87b03e5Sespie     {
3487c87b03e5Sespie       unsigned char byte = value & 0x7f;
3488c87b03e5Sespie       value >>= 7;
3489c87b03e5Sespie       if (value)
3490c87b03e5Sespie 	byte |= 0x80;
3491c87b03e5Sespie       VARRAY_PUSH_UCHAR (*data_area, byte);
3492c87b03e5Sespie     }
3493c87b03e5Sespie   while (value);
3494c87b03e5Sespie }
3495c87b03e5Sespie 
3496c87b03e5Sespie static void
3497c87b03e5Sespie push_sleb128 (data_area, value)
3498c87b03e5Sespie      varray_type *data_area;
3499c87b03e5Sespie      int value;
3500c87b03e5Sespie {
3501c87b03e5Sespie   unsigned char byte;
3502c87b03e5Sespie   int more;
3503c87b03e5Sespie 
3504c87b03e5Sespie   do
3505c87b03e5Sespie     {
3506c87b03e5Sespie       byte = value & 0x7f;
3507c87b03e5Sespie       value >>= 7;
3508c87b03e5Sespie       more = ! ((value == 0 && (byte & 0x40) == 0)
3509c87b03e5Sespie 		|| (value == -1 && (byte & 0x40) != 0));
3510c87b03e5Sespie       if (more)
3511c87b03e5Sespie 	byte |= 0x80;
3512c87b03e5Sespie       VARRAY_PUSH_UCHAR (*data_area, byte);
3513c87b03e5Sespie     }
3514c87b03e5Sespie   while (more);
3515c87b03e5Sespie }
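
/* Worked examples, illustrative only (not part of the original
   source).  push_uleb128 encodes 624485 as the three bytes 0xe5 0x8e
   0x26: low seven bits first, with the high bit set on every byte but
   the last.  push_sleb128 encodes -2 as the single byte 0x7e, and 127
   as 0xff 0x00, because the sign bit (0x40) of the final byte must
   agree with the sign of the value.  An action record with filter 1
   and no next record is therefore pushed as the two bytes 0x01 0x00.  */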
3516c87b03e5Sespie 
3517c87b03e5Sespie 
3518c87b03e5Sespie #ifndef HAVE_AS_LEB128
3519c87b03e5Sespie static int
3520c87b03e5Sespie dw2_size_of_call_site_table ()
3521c87b03e5Sespie {
3522c87b03e5Sespie   int n = cfun->eh->call_site_data_used;
3523c87b03e5Sespie   int size = n * (4 + 4 + 4);
3524c87b03e5Sespie   int i;
3525c87b03e5Sespie 
3526c87b03e5Sespie   for (i = 0; i < n; ++i)
3527c87b03e5Sespie     {
3528c87b03e5Sespie       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3529c87b03e5Sespie       size += size_of_uleb128 (cs->action);
3530c87b03e5Sespie     }
3531c87b03e5Sespie 
3532c87b03e5Sespie   return size;
3533c87b03e5Sespie }
3534c87b03e5Sespie 
3535c87b03e5Sespie static int
3536c87b03e5Sespie sjlj_size_of_call_site_table ()
3537c87b03e5Sespie {
3538c87b03e5Sespie   int n = cfun->eh->call_site_data_used;
3539c87b03e5Sespie   int size = 0;
3540c87b03e5Sespie   int i;
3541c87b03e5Sespie 
3542c87b03e5Sespie   for (i = 0; i < n; ++i)
3543c87b03e5Sespie     {
3544c87b03e5Sespie       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3545c87b03e5Sespie       size += size_of_uleb128 (INTVAL (cs->landing_pad));
3546c87b03e5Sespie       size += size_of_uleb128 (cs->action);
3547c87b03e5Sespie     }
3548c87b03e5Sespie 
3549c87b03e5Sespie   return size;
3550c87b03e5Sespie }
3551c87b03e5Sespie #endif
3552c87b03e5Sespie 
3553c87b03e5Sespie static void
3554c87b03e5Sespie dw2_output_call_site_table ()
3555c87b03e5Sespie {
3556c87b03e5Sespie   const char *const function_start_lab
3557c87b03e5Sespie     = IDENTIFIER_POINTER (current_function_func_begin_label);
3558c87b03e5Sespie   int n = cfun->eh->call_site_data_used;
3559c87b03e5Sespie   int i;
3560c87b03e5Sespie 
3561c87b03e5Sespie   for (i = 0; i < n; ++i)
3562c87b03e5Sespie     {
3563c87b03e5Sespie       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3564c87b03e5Sespie       char reg_start_lab[32];
3565c87b03e5Sespie       char reg_end_lab[32];
3566c87b03e5Sespie       char landing_pad_lab[32];
3567c87b03e5Sespie 
3568c87b03e5Sespie       ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3569c87b03e5Sespie       ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3570c87b03e5Sespie 
3571c87b03e5Sespie       if (cs->landing_pad)
3572c87b03e5Sespie 	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3573c87b03e5Sespie 				     CODE_LABEL_NUMBER (cs->landing_pad));
3574c87b03e5Sespie 
3575c87b03e5Sespie       /* ??? Perhaps use insn length scaling if the assembler supports
3576c87b03e5Sespie 	 generic arithmetic.  */
3577c87b03e5Sespie       /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3578c87b03e5Sespie 	 data4 if the function is small enough.  */
3579c87b03e5Sespie #ifdef HAVE_AS_LEB128
3580c87b03e5Sespie       dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3581c87b03e5Sespie 				    "region %d start", i);
3582c87b03e5Sespie       dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3583c87b03e5Sespie 				    "length");
3584c87b03e5Sespie       if (cs->landing_pad)
3585c87b03e5Sespie 	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3586c87b03e5Sespie 				      "landing pad");
3587c87b03e5Sespie       else
3588c87b03e5Sespie 	dw2_asm_output_data_uleb128 (0, "landing pad");
3589c87b03e5Sespie #else
3590c87b03e5Sespie       dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3591c87b03e5Sespie 			    "region %d start", i);
3592c87b03e5Sespie       dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3593c87b03e5Sespie       if (cs->landing_pad)
3594c87b03e5Sespie 	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3595c87b03e5Sespie 			      "landing pad");
3596c87b03e5Sespie       else
3597c87b03e5Sespie 	dw2_asm_output_data (4, 0, "landing pad");
3598c87b03e5Sespie #endif
3599c87b03e5Sespie       dw2_asm_output_data_uleb128 (cs->action, "action");
3600c87b03e5Sespie     }
3601c87b03e5Sespie 
3602c87b03e5Sespie   call_site_base += n;
3603c87b03e5Sespie }
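
/* Illustrative layout note, not part of the original source.  With the
   non-LEB128 branch above, each record of the call-site table consists
   of the following fields (the label names are examples only):

	region start	4-byte delta	LEHB<i> - function start
	region length	4-byte delta	LEHE<i> - LEHB<i>
	landing pad	4-byte delta	L<n> - function start, or 0
	action		uleb128		1-based action index, or 0

   which is what dw2_size_of_call_site_table accounts for with its
   4 + 4 + 4 bytes per record plus the uleb128 action size.  */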
3604c87b03e5Sespie 
3605c87b03e5Sespie static void
3606c87b03e5Sespie sjlj_output_call_site_table ()
3607c87b03e5Sespie {
3608c87b03e5Sespie   int n = cfun->eh->call_site_data_used;
3609c87b03e5Sespie   int i;
3610c87b03e5Sespie 
3611c87b03e5Sespie   for (i = 0; i < n; ++i)
3612c87b03e5Sespie     {
3613c87b03e5Sespie       struct call_site_record *cs = &cfun->eh->call_site_data[i];
3614c87b03e5Sespie 
3615c87b03e5Sespie       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3616c87b03e5Sespie 				   "region %d landing pad", i);
3617c87b03e5Sespie       dw2_asm_output_data_uleb128 (cs->action, "action");
3618c87b03e5Sespie     }
3619c87b03e5Sespie 
3620c87b03e5Sespie   call_site_base += n;
3621c87b03e5Sespie }
3622c87b03e5Sespie 
3623c87b03e5Sespie /* Tell assembler to switch to the section for the exception handling
3624c87b03e5Sespie    table.  */
3625c87b03e5Sespie 
3626c87b03e5Sespie void
3627c87b03e5Sespie default_exception_section ()
3628c87b03e5Sespie {
3629c87b03e5Sespie   if (targetm.have_named_sections)
3630c87b03e5Sespie     {
3631c87b03e5Sespie       int flags;
3632c87b03e5Sespie #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3633c87b03e5Sespie       int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3634c87b03e5Sespie 
3635c87b03e5Sespie       flags = (! flag_pic
3636c87b03e5Sespie 	       || ((tt_format & 0x70) != DW_EH_PE_absptr
3637c87b03e5Sespie 		   && (tt_format & 0x70) != DW_EH_PE_aligned))
3638c87b03e5Sespie 	      ? 0 : SECTION_WRITE;
3639c87b03e5Sespie #else
3640c87b03e5Sespie       flags = SECTION_WRITE;
3641c87b03e5Sespie #endif
3642c87b03e5Sespie       named_section_flags (".gcc_except_table", flags);
3643c87b03e5Sespie     }
3644c87b03e5Sespie   else if (flag_pic)
3645c87b03e5Sespie     data_section ();
3646c87b03e5Sespie   else
3647c87b03e5Sespie     readonly_data_section ();
3648c87b03e5Sespie }
3649c87b03e5Sespie 
3650c87b03e5Sespie void
3651c87b03e5Sespie output_function_exception_table ()
3652c87b03e5Sespie {
3653c87b03e5Sespie   int tt_format, cs_format, lp_format, i, n;
3654c87b03e5Sespie #ifdef HAVE_AS_LEB128
3655c87b03e5Sespie   char ttype_label[32];
3656c87b03e5Sespie   char cs_after_size_label[32];
3657c87b03e5Sespie   char cs_end_label[32];
3658c87b03e5Sespie #else
3659c87b03e5Sespie   int call_site_len;
3660c87b03e5Sespie #endif
3661c87b03e5Sespie   int have_tt_data;
3662c87b03e5Sespie   int tt_format_size = 0;
3663c87b03e5Sespie 
3664c87b03e5Sespie   /* Not all functions need anything.  */
3665c87b03e5Sespie   if (! cfun->uses_eh_lsda)
3666c87b03e5Sespie     return;
3667c87b03e5Sespie 
3668c87b03e5Sespie #ifdef IA64_UNWIND_INFO
3669c87b03e5Sespie   fputs ("\t.personality\t", asm_out_file);
3670c87b03e5Sespie   output_addr_const (asm_out_file, eh_personality_libfunc);
3671c87b03e5Sespie   fputs ("\n\t.handlerdata\n", asm_out_file);
3672c87b03e5Sespie   /* Note that varasm still thinks we're in the function's code section.
3673c87b03e5Sespie      The ".endp" directive that will immediately follow will take us back.  */
3674c87b03e5Sespie #else
3675c87b03e5Sespie   (*targetm.asm_out.exception_section) ();
3676c87b03e5Sespie #endif
3677c87b03e5Sespie 
3678c87b03e5Sespie   have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3679c87b03e5Sespie 		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3680c87b03e5Sespie 
3681c87b03e5Sespie   /* Indicate the format of the @TType entries.  */
3682c87b03e5Sespie   if (! have_tt_data)
3683c87b03e5Sespie     tt_format = DW_EH_PE_omit;
3684c87b03e5Sespie   else
3685c87b03e5Sespie     {
3686c87b03e5Sespie       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3687c87b03e5Sespie #ifdef HAVE_AS_LEB128
3688c87b03e5Sespie       ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3689c87b03e5Sespie 				   current_function_funcdef_no);
3690c87b03e5Sespie #endif
3691c87b03e5Sespie       tt_format_size = size_of_encoded_value (tt_format);
3692c87b03e5Sespie 
3693c87b03e5Sespie       assemble_align (tt_format_size * BITS_PER_UNIT);
3694c87b03e5Sespie     }
3695c87b03e5Sespie 
3696c87b03e5Sespie   ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA",
3697c87b03e5Sespie 			     current_function_funcdef_no);
3698c87b03e5Sespie 
3699c87b03e5Sespie   /* The LSDA header.  */
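/* Overview (not part of the original source): the header emitted below
   consists, in order, of

	1 byte		@LPStart format (DW_EH_PE_omit here)
	(omitted)	@LPStart pointer
	1 byte		@TType format
	uleb128		@TType base offset (only when there is @TType data)
	1 byte		call-site table format
	uleb128		call-site table length

   and is followed by the call-site table, the action record table, the
   @TType entries and the exception specification table.  */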
3700c87b03e5Sespie 
3701c87b03e5Sespie   /* Indicate the format of the landing pad start pointer.  An omitted
3702c87b03e5Sespie      field implies @LPStart == @Start.  */
3703c87b03e5Sespie   /* Currently we always put @LPStart == @Start.  This field would
3704c87b03e5Sespie      be most useful in moving the landing pads completely out of
3705c87b03e5Sespie      line to another section, but it could also be used to minimize
3706c87b03e5Sespie      the size of uleb128 landing pad offsets.  */
3707c87b03e5Sespie   lp_format = DW_EH_PE_omit;
3708c87b03e5Sespie   dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3709c87b03e5Sespie 		       eh_data_format_name (lp_format));
3710c87b03e5Sespie 
3711c87b03e5Sespie   /* @LPStart pointer would go here.  */
3712c87b03e5Sespie 
3713c87b03e5Sespie   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3714c87b03e5Sespie 		       eh_data_format_name (tt_format));
3715c87b03e5Sespie 
3716c87b03e5Sespie #ifndef HAVE_AS_LEB128
3717c87b03e5Sespie   if (USING_SJLJ_EXCEPTIONS)
3718c87b03e5Sespie     call_site_len = sjlj_size_of_call_site_table ();
3719c87b03e5Sespie   else
3720c87b03e5Sespie     call_site_len = dw2_size_of_call_site_table ();
3721c87b03e5Sespie #endif
3722c87b03e5Sespie 
3723c87b03e5Sespie   /* A pc-relative 4-byte displacement to the @TType data.  */
3724c87b03e5Sespie   if (have_tt_data)
3725c87b03e5Sespie     {
3726c87b03e5Sespie #ifdef HAVE_AS_LEB128
3727c87b03e5Sespie       char ttype_after_disp_label[32];
3728c87b03e5Sespie       ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3729c87b03e5Sespie 				   current_function_funcdef_no);
3730c87b03e5Sespie       dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3731c87b03e5Sespie 				    "@TType base offset");
3732c87b03e5Sespie       ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3733c87b03e5Sespie #else
3734c87b03e5Sespie       /* Ug.  Alignment queers things.  */
3735c87b03e5Sespie       unsigned int before_disp, after_disp, last_disp, disp;
3736c87b03e5Sespie 
3737c87b03e5Sespie       before_disp = 1 + 1;
3738c87b03e5Sespie       after_disp = (1 + size_of_uleb128 (call_site_len)
3739c87b03e5Sespie 		    + call_site_len
3740c87b03e5Sespie 		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3741c87b03e5Sespie 		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3742c87b03e5Sespie 		       * tt_format_size));
3743c87b03e5Sespie 
3744c87b03e5Sespie       disp = after_disp;
3745c87b03e5Sespie       do
3746c87b03e5Sespie 	{
3747c87b03e5Sespie 	  unsigned int disp_size, pad;
3748c87b03e5Sespie 
3749c87b03e5Sespie 	  last_disp = disp;
3750c87b03e5Sespie 	  disp_size = size_of_uleb128 (disp);
3751c87b03e5Sespie 	  pad = before_disp + disp_size + after_disp;
3752c87b03e5Sespie 	  if (pad % tt_format_size)
3753c87b03e5Sespie 	    pad = tt_format_size - (pad % tt_format_size);
3754c87b03e5Sespie 	  else
3755c87b03e5Sespie 	    pad = 0;
3756c87b03e5Sespie 	  disp = after_disp + pad;
3757c87b03e5Sespie 	}
3758c87b03e5Sespie       while (disp != last_disp);
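      /* Worked example (not part of the original source): suppose
	 tt_format_size == 4, call_site_len == 12, 20 bytes of action
	 records and two @TType entries.  Then before_disp == 2 and
	 after_disp == 1 + 1 + 12 + 20 + 8 == 42.  First pass:
	 disp_size == 1, 2 + 1 + 42 == 45, 45 % 4 == 1, so pad == 3 and
	 disp becomes 45.  Second pass: disp_size and pad are unchanged,
	 disp stays 45, and the loop stops.  The iteration is needed
	 because the uleb128 encoding of DISP can itself grow and
	 thereby change the padding.  */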
3759c87b03e5Sespie 
3760c87b03e5Sespie       dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3761c87b03e5Sespie #endif
3762c87b03e5Sespie     }
3763c87b03e5Sespie 
3764c87b03e5Sespie   /* Indicate the format of the call-site offsets.  */
3765c87b03e5Sespie #ifdef HAVE_AS_LEB128
3766c87b03e5Sespie   cs_format = DW_EH_PE_uleb128;
3767c87b03e5Sespie #else
3768c87b03e5Sespie   cs_format = DW_EH_PE_udata4;
3769c87b03e5Sespie #endif
3770c87b03e5Sespie   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3771c87b03e5Sespie 		       eh_data_format_name (cs_format));
3772c87b03e5Sespie 
3773c87b03e5Sespie #ifdef HAVE_AS_LEB128
3774c87b03e5Sespie   ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3775c87b03e5Sespie 			       current_function_funcdef_no);
3776c87b03e5Sespie   ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3777c87b03e5Sespie 			       current_function_funcdef_no);
3778c87b03e5Sespie   dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3779c87b03e5Sespie 				"Call-site table length");
3780c87b03e5Sespie   ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3781c87b03e5Sespie   if (USING_SJLJ_EXCEPTIONS)
3782c87b03e5Sespie     sjlj_output_call_site_table ();
3783c87b03e5Sespie   else
3784c87b03e5Sespie     dw2_output_call_site_table ();
3785c87b03e5Sespie   ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3786c87b03e5Sespie #else
3787c87b03e5Sespie   dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3788c87b03e5Sespie   if (USING_SJLJ_EXCEPTIONS)
3789c87b03e5Sespie     sjlj_output_call_site_table ();
3790c87b03e5Sespie   else
3791c87b03e5Sespie     dw2_output_call_site_table ();
3792c87b03e5Sespie #endif
3793c87b03e5Sespie 
3794c87b03e5Sespie   /* ??? Decode and interpret the data for flag_debug_asm.  */
3795c87b03e5Sespie   n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3796c87b03e5Sespie   for (i = 0; i < n; ++i)
3797c87b03e5Sespie     dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3798c87b03e5Sespie 			 (i ? NULL : "Action record table"));
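  /* Note (not part of the original source): action_record_data was
     packed earlier in this file; each record is, roughly, a pair of
     sleb128 values -- a type filter and a link to the next record in
     the chain -- and the loop above simply copies the prepacked bytes
     out, attaching a comment string only to the first byte.  */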
3799c87b03e5Sespie 
3800c87b03e5Sespie   if (have_tt_data)
3801c87b03e5Sespie     assemble_align (tt_format_size * BITS_PER_UNIT);
3802c87b03e5Sespie 
3803c87b03e5Sespie   i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3804c87b03e5Sespie   while (i-- > 0)
3805c87b03e5Sespie     {
3806c87b03e5Sespie       tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3807c87b03e5Sespie       rtx value;
3808c87b03e5Sespie 
3809c87b03e5Sespie       if (type == NULL_TREE)
3810c87b03e5Sespie 	type = integer_zero_node;
3811c87b03e5Sespie       else
3812c87b03e5Sespie 	type = lookup_type_for_runtime (type);
3813c87b03e5Sespie 
3814c87b03e5Sespie       value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3815c87b03e5Sespie       if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3816c87b03e5Sespie 	assemble_integer (value, tt_format_size,
3817c87b03e5Sespie 			  tt_format_size * BITS_PER_UNIT, 1);
3818c87b03e5Sespie       else
3819c87b03e5Sespie 	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
3820c87b03e5Sespie     }
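  /* Note (not part of the original source): the entries are emitted in
     reverse order because the runtime indexes the @TType table
     backwards from its end; filter value N refers to the Nth encoded
     pointer counting back from the base (the LLSDATT label below when
     the assembler supports LEB128, otherwise the offset computed
     above).  */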
3821c87b03e5Sespie 
3822c87b03e5Sespie #ifdef HAVE_AS_LEB128
3823c87b03e5Sespie   if (have_tt_data)
3824c87b03e5Sespie       ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3825c87b03e5Sespie #endif
3826c87b03e5Sespie 
3827c87b03e5Sespie   /* ??? Decode and interpret the data for flag_debug_asm.  */
3828c87b03e5Sespie   n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3829c87b03e5Sespie   for (i = 0; i < n; ++i)
3830c87b03e5Sespie     dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3831c87b03e5Sespie 			 (i ? NULL : "Exception specification table"));
3832c87b03e5Sespie 
3833c87b03e5Sespie   function_section (current_function_decl);
3834c87b03e5Sespie }
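/* Illustrative sketch (not part of the original source): for a function
   with no @TType data, a LEB128-capable assembler, and funcdef number
   42, the routine above might emit roughly

   .LLSDA42:
	.byte	0xff			# @LPStart format (omit)
	.byte	0xff			# @TType format (omit)
	.byte	0x1			# call-site format (uleb128)
	.uleb128 .LLSDACSE42-.LLSDACSB42	# Call-site table length
   .LLSDACSB42:
	... call-site records ...
   .LLSDACSE42:
	... action records ...

   Label spelling, comment syntax and encodings all vary by target; the
   function number 42 is just a placeholder.  */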
3835c87b03e5Sespie 
3836c87b03e5Sespie #include "gt-except.h"
3837