/* Structure for saving state for a nested function.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#ifndef GCC_FUNCTION_H
#define GCC_FUNCTION_H

#include "tree.h"
#include "hashtab.h"

struct var_refs_queue GTY(())
{
  rtx modified;
  enum machine_mode promoted_mode;
  int unsignedp;
  struct var_refs_queue *next;
};

/* Stack of pending (incomplete) sequences saved by `start_sequence'.
   Each element describes one pending sequence.
   The main insn-chain is saved in the last element of the chain,
   unless the chain is empty.  */

struct sequence_stack GTY(())
{
  /* First and last insns in the chain of the saved sequence.  */
  rtx first;
  rtx last;
  struct sequence_stack *next;
};
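
/* Illustrative sketch (not part of this interface): the usual pattern for
   building a nested insn sequence and splicing it back into the main chain.
   start_sequence pushes the current first/last insns onto sequence_stack,
   get_insns returns the head of the nested chain, and end_sequence pops the
   stack, restoring the outer chain:

     start_sequence ();
     emit_move_insn (dest, src);     -- appended to the nested chain
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, insn);   -- splice before INSN in the outer chain

   See emit-rtl.c for the emit entry points themselves.  */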

extern struct sequence_stack *sequence_stack;

/* Stack of single obstacks.  */

struct simple_obstack_stack
{
  struct obstack *obstack;
  struct simple_obstack_stack *next;
};

struct emit_status GTY(())
{
  /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function.
     After rtl generation, it is 1 plus the largest register number used.  */
  int x_reg_rtx_no;

  /* Lowest label number in current function.  */
  int x_first_label_num;

  /* The ends of the doubly-linked chain of rtl for the current function.
     Both are reset to null at the start of rtl generation for the function.

     start_sequence saves both of these on `sequence_stack' and then starts
     a new, nested sequence of insns.  */
  rtx x_first_insn;
  rtx x_last_insn;

  /* Stack of pending (incomplete) sequences saved by `start_sequence'.
     Each element describes one pending sequence.
     The main insn-chain is saved in the last element of the chain,
     unless the chain is empty.  */
  struct sequence_stack *sequence_stack;

  /* INSN_UID for next insn emitted.
     Reset to 1 for each function compiled.  */
  int x_cur_insn_uid;

  /* Location of the last line-number NOTE emitted.
     This is used to avoid generating duplicates.  */
  location_t x_last_location;

  /* The length of the regno_pointer_align, regno_decl, and x_regno_reg_rtx
     vectors.  Since these vectors are needed during the expansion phase when
     the total number of registers in the function is not yet known, the
     vectors are copied and made bigger when necessary.  */
  int regno_pointer_align_length;

  /* Indexed by pseudo register number, if nonzero gives the known alignment
     for that pseudo (if REG_POINTER is set in x_regno_reg_rtx).
     Allocated in parallel with x_regno_reg_rtx.  */
  unsigned char * GTY ((length ("%h.x_reg_rtx_no")))
    regno_pointer_align;

  /* Indexed by pseudo register number, gives the rtx for that pseudo.
     Allocated in parallel with regno_pointer_align.  */
  rtx * GTY ((length ("%h.x_reg_rtx_no"))) x_regno_reg_rtx;
};

/* For backward compatibility... eventually these should all go away.  */
#define reg_rtx_no (cfun->emit->x_reg_rtx_no)
#define regno_reg_rtx (cfun->emit->x_regno_reg_rtx)
#define seq_stack (cfun->emit->sequence_stack)

#define REGNO_POINTER_ALIGN(REGNO) (cfun->emit->regno_pointer_align[REGNO])
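
/* Illustrative sketch (not part of this interface): how a pointer pseudo's
   alignment is recorded and later queried.  mark_reg_pointer (emit-rtl.c)
   sets REG_POINTER on the pseudo and records the alignment, in bits, in
   regno_pointer_align; REGNO_POINTER_ALIGN reads it back.  The variable
   names below are made up:

     rtx ptr = gen_reg_rtx (Pmode);
     mark_reg_pointer (ptr, BITS_PER_WORD);
     if (REGNO_POINTER_ALIGN (REGNO (ptr)) >= BITS_PER_WORD)
       ...  -- accesses through PTR are known to be word-aligned  */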

struct expr_status GTY(())
{
  /* Number of units that we should eventually pop off the stack.
     These are the arguments to function calls that have already returned.  */
  int x_pending_stack_adjust;

  /* Under some ABIs, it is the caller's responsibility to pop arguments
     pushed for function calls.  A naive implementation would simply pop
     the arguments immediately after each call.  However, if several
     function calls are made in a row, it is typically cheaper to pop
     all the arguments after all of the calls are complete since a
     single pop instruction can be used.  Therefore, GCC attempts to
     defer popping the arguments until absolutely necessary.  (For
     example, at the end of a conditional, the arguments must be popped,
     since code outside the conditional won't know whether or not the
     arguments need to be popped.)

     When INHIBIT_DEFER_POP is nonzero, however, the compiler does not
     attempt to defer pops.  Instead, the stack is popped immediately
     after each call.  Rather than setting this variable directly, use
     NO_DEFER_POP and OK_DEFER_POP.  */
  int x_inhibit_defer_pop;

  /* If PREFERRED_STACK_BOUNDARY and PUSH_ROUNDING are defined, the stack
     boundary can be momentarily unaligned while pushing the arguments.
     Record the delta since last aligned boundary here in order to get
     stack alignment in the nested function calls working right.  */
  int x_stack_pointer_delta;

  /* Nonzero means __builtin_saveregs has already been done in this function.
     The value is the pseudoreg containing the value __builtin_saveregs
     returned.  */
  rtx x_saveregs_value;

  /* Similarly for __builtin_apply_args.  */
  rtx x_apply_args_value;

  /* List of labels that must never be deleted.  */
  rtx x_forced_labels;
};

#define pending_stack_adjust (cfun->expr->x_pending_stack_adjust)
#define inhibit_defer_pop (cfun->expr->x_inhibit_defer_pop)
#define saveregs_value (cfun->expr->x_saveregs_value)
#define apply_args_value (cfun->expr->x_apply_args_value)
#define forced_labels (cfun->expr->x_forced_labels)
#define stack_pointer_delta (cfun->expr->x_stack_pointer_delta)
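
/* Illustrative sketch (not part of this interface): expansion code brackets
   a region in which argument pops must not be deferred with the expr.h
   macros NO_DEFER_POP and OK_DEFER_POP, which increment and decrement
   inhibit_defer_pop rather than assigning to it directly:

     NO_DEFER_POP;
     ... expand code whose calls must pop their arguments immediately ...
     OK_DEFER_POP;  */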

struct temp_slot;
typedef struct temp_slot *temp_slot_p;

DEF_VEC_P(temp_slot_p);
DEF_VEC_ALLOC_P(temp_slot_p,gc);

enum function_frequency {
  /* This function most likely won't be executed at all.
     (set only when profile feedback is available).  */
  FUNCTION_FREQUENCY_UNLIKELY_EXECUTED,
  /* The default value.  */
  FUNCTION_FREQUENCY_NORMAL,
  /* Optimize this function hard
     (set only when profile feedback is available).  */
  FUNCTION_FREQUENCY_HOT
};
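
/* Illustrative sketch (not part of this interface): once the branch
   probabilities pass has filled in cfun->function_frequency, later passes
   can key size/speed decisions off it, e.g.:

     if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
       ...  -- prefer size over speed, or place the code in the
              unlikely-executed text section  */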

/* This structure can save all the important global and static variables
   describing the status of the current function.  */

struct function GTY(())
{
  struct eh_status *eh;
  struct expr_status *expr;
  struct emit_status *emit;
  struct varasm_status *varasm;

  /* The control flow graph for this function.  */
  struct control_flow_graph *cfg;

  /* For function.c.  */

  /* Points to the FUNCTION_DECL of this function.  */
  tree decl;

  /* Function containing this function, if any.  */
  struct function *outer;

  /* Number of bytes of args popped by function being compiled on its return.
     Zero if no bytes are to be popped.
     May affect compilation of return insn or of function epilogue.  */
  int pops_args;

  /* If function's args have a fixed size, this is that size, in bytes.
     Otherwise, it is -1.
     May affect compilation of return insn or of function epilogue.  */
  int args_size;

  /* # bytes the prologue should push and pretend that the caller pushed them.
     The prologue must do this, but only if parms can be passed in
     registers.  */
  int pretend_args_size;

  /* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
     defined, the needed space is pushed by the prologue.  */
  int outgoing_args_size;

  /* This is the offset from the arg pointer to the place where the first
     anonymous arg can be found, if there is one.  */
  rtx arg_offset_rtx;

  /* Quantities of various kinds of registers
     used for the current function's args.  */
  CUMULATIVE_ARGS args_info;

  /* If nonzero, an RTL expression for the location at which the current
     function returns its result.  If the current function returns its
     result in a register, current_function_return_rtx will always be
     the hard register containing the result.  */
  rtx return_rtx;

  /* The arg pointer hard register, or the pseudo into which it was copied.  */
  rtx internal_arg_pointer;

  /* Opaque pointer used by get_hard_reg_initial_val and
     has_hard_reg_initial_val (see integrate.[hc]).  */
  struct initial_value_struct *hard_reg_initial_vals;

  /* List (chain of EXPR_LIST) of labels heading the current handlers for
     nonlocal gotos.  */
  rtx x_nonlocal_goto_handler_labels;

  /* Label that will go on function epilogue.
     Jumping to this label serves as a "return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_return_label;

  /* Label that will go on the end of function epilogue.
     Jumping to this label serves as a "naked return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_naked_return_label;

  /* List (chain of EXPR_LISTs) of all stack slots in this function.
     Made for the sake of unshare_all_rtl.  */
  rtx x_stack_slot_list;

  /* Place after which to insert the stack-checking probe, if one is
     needed.  */
  rtx x_stack_check_probe_note;

  /* Location at which to save the argument pointer if it will need to be
     referenced.  There are two cases where this is done: if nonlocal gotos
     exist, or if vars stored at an offset from the argument pointer will be
     needed by inner routines.  */
  rtx x_arg_pointer_save_area;

  /* Offset to end of allocated area of stack frame.
     If stack grows down, this is the address of the last stack slot allocated.
     If stack grows up, this is the address for the next slot.  */
  HOST_WIDE_INT x_frame_offset;

  /* A PARM_DECL that should contain the static chain for this function.
     It will be initialized at the beginning of the function.  */
  tree static_chain_decl;

  /* An expression that contains the non-local goto save area.  The first
     word is the saved frame pointer and the second is the saved stack
     pointer.  */
  tree nonlocal_goto_save_area;

  /* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
  rtx x_parm_birth_insn;

  /* List of all used temporaries allocated, by level.  */
  VEC(temp_slot_p,gc) *x_used_temp_slots;

  /* List of available temp slots.  */
  struct temp_slot *x_avail_temp_slots;

  /* This slot is initialized to 0 and is added to while compiling a
     nested function.  */
  struct var_refs_queue *fixup_var_refs_queue;

  /* Current nesting level for temporaries.  */
  int x_temp_slot_level;

  /* Highest label number in current function.  */
  int inl_max_label_num;

  /* Function sequence number for profiling, debugging, etc.  */
  int funcdef_no;

  /* For flow.c.  */

  /* Highest loop depth seen so far in loop analysis.  Used in flow.c
     for the "failure strategy" when doing liveness analysis starting
     with non-empty initial sets.  */
  int max_loop_depth;

  /* For md files.  */

  /* tm.h can use this to store whatever it likes.  */
  struct machine_function * GTY ((maybe_undef)) machine;

  /* The largest alignment of any slot allocated on the stack.  */
  unsigned int stack_alignment_needed;

  /* Preferred alignment of the end of stack frame.  */
  unsigned int preferred_stack_boundary;

  /* Language-specific code can use this to store whatever it likes.  */
  struct language_function * language;

  /* Used types hash table.  */
  htab_t GTY ((param_is (union tree_node))) used_types_hash;

  /* For reorg.  */

  /* If some insns can be deferred to the delay slots of the epilogue, the
     delay list for them is recorded here.  */
  rtx epilogue_delay_list;

  /* Maximum number of entries in a single jump table.  Used to estimate
     final flowgraph size.  */
  int max_jumptable_ents;

  /* UIDs for LABEL_DECLs.  */
  int last_label_uid;

  /* Line number of the end of the function.  */
  location_t function_end_locus;

  /* Array mapping insn uids to blocks.  */
  VEC(tree,gc) *ib_boundaries_block;

  /* The variables unexpanded so far.  */
  tree unexpanded_var_list;

  /* Assembly labels for the hot and cold text sections, to
     be used by debugger functions for determining the size of text
     sections.  */

  const char *hot_section_label;
  const char *cold_section_label;
  const char *hot_section_end_label;
  const char *cold_section_end_label;

  /* String to be used for name of cold text sections, via
     targetm.asm_out.named_section.  */

  const char *unlikely_text_section_name;

  /* A variable living at the top of the frame that holds a known value.
     Used for detecting stack clobbers.  */
  tree stack_protect_guard;

  /* Collected bit flags.  */

  /* Nonzero if function being compiled needs to be given an address
     where the value should be stored.  */
  unsigned int returns_struct : 1;

  /* Nonzero if function being compiled needs to
     return the address of where it has put a structure value.  */
  unsigned int returns_pcc_struct : 1;

  /* Nonzero if the current function returns a pointer type.  */
  unsigned int returns_pointer : 1;

  /* Nonzero if function being compiled can call setjmp.  */
  unsigned int calls_setjmp : 1;

  /* Nonzero if function being compiled can call alloca,
     either as a subroutine or builtin.  */
  unsigned int calls_alloca : 1;

  /* Nonzero if function being compiled called builtin_return_addr or
     builtin_frame_address with nonzero count.  */
  unsigned int accesses_prior_frames : 1;

  /* Nonzero if the function calls __builtin_eh_return.  */
  unsigned int calls_eh_return : 1;

  /* Nonzero if function being compiled receives nonlocal gotos
     from nested functions.  */
  unsigned int has_nonlocal_label : 1;

  /* Nonzero if function being compiled has nonlocal gotos to parent
     function.  */
  unsigned int has_nonlocal_goto : 1;

  /* Nonzero if the current function is a thunk, i.e., a lightweight
     function (implemented by the output_mi_thunk hook) that just
     adjusts one of its arguments and forwards to another
     function.  */
  unsigned int is_thunk : 1;

  /* This bit is used by the exception handling logic.  It is set if all
     calls (if any) are sibling calls.  Such functions do not have to
     have EH tables generated, as they cannot throw.  A call to such a
     function, however, should be treated as throwing if any of its callees
     can throw.  */
  unsigned int all_throwers_are_sibcalls : 1;

  /* Nonzero if profiling code should be generated.  */
  unsigned int profile : 1;

  /* Nonzero if stack limit checking should be enabled in the current
     function.  */
  unsigned int limit_stack : 1;

  /* Nonzero if current function uses stdarg.h or equivalent.  */
  unsigned int stdarg : 1;

  /* Nonzero if the back-end should not keep track of expressions that
     determine the size of variable-sized objects.  Normally, such
     expressions are saved away, and then expanded when the next
     function is started.  For example, if a parameter has a
     variable-sized type, then the size of the parameter is computed
     when the function body is entered.  However, some front-ends do
     not desire this behavior.  */
  unsigned int x_dont_save_pending_sizes_p : 1;

  /* Nonzero if the current function uses the constant pool.  */
  unsigned int uses_const_pool : 1;

  /* Nonzero if the current function uses pic_offset_table_rtx.  */
  unsigned int uses_pic_offset_table : 1;

  /* Nonzero if the current function needs an lsda for exception handling.  */
  unsigned int uses_eh_lsda : 1;

  /* Nonzero if code to initialize arg_pointer_save_area has been emitted.  */
  unsigned int arg_pointer_save_area_init : 1;

  /* Nonzero once inlining into this function has been performed.  */
  unsigned int after_inlining : 1;

  /* Set when a call to the function itself has been emitted.  */
  unsigned int recursive_call_emit : 1;

  /* Set when a tail call has been produced.  */
  unsigned int tail_call_emit : 1;

  /* How commonly executed the function is.  Initialized during the branch
     probabilities pass.  */
  ENUM_BITFIELD (function_frequency) function_frequency : 2;

  /* Number of units of general registers that need saving in a stdarg
     function.  What a unit is depends on the backend: it may be a number
     of bytes or a number of registers.  */
  unsigned int va_list_gpr_size : 8;

  /* Number of units of floating point registers that need saving in a
     stdarg function.  */
  unsigned int va_list_fpr_size : 8;
};

/* If va_list_[gf]pr_size is set to this, it means we don't know how
   many units need to be saved.  */
#define VA_LIST_MAX_GPR_SIZE	255
#define VA_LIST_MAX_FPR_SIZE	255
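
/* Illustrative sketch (not part of this interface): a backend's
   setup_incoming_varargs logic can consult these fields to avoid saving
   registers that va_arg will never read.  The cutoff test below is a
   made-up example, not any particular target's code:

     if (cfun->va_list_gpr_size != VA_LIST_MAX_GPR_SIZE
	 && n_gpr_units_to_save > cfun->va_list_gpr_size)
       n_gpr_units_to_save = cfun->va_list_gpr_size;  */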

/* The function currently being compiled.  */
extern GTY(()) struct function *cfun;

/* Pointer to chain of `struct function' for containing functions.  */
extern GTY(()) struct function *outer_function_chain;

/* Nonzero if we've already converted virtual regs to hard regs.  */
extern int virtuals_instantiated;

/* Nonzero if at least one trampoline has been created.  */
extern int trampolines_created;

/* For backward compatibility... eventually these should all go away.  */
#define current_function_pops_args (cfun->pops_args)
#define current_function_returns_struct (cfun->returns_struct)
#define current_function_returns_pcc_struct (cfun->returns_pcc_struct)
#define current_function_returns_pointer (cfun->returns_pointer)
#define current_function_calls_setjmp (cfun->calls_setjmp)
#define current_function_calls_alloca (cfun->calls_alloca)
#define current_function_accesses_prior_frames (cfun->accesses_prior_frames)
#define current_function_calls_eh_return (cfun->calls_eh_return)
#define current_function_is_thunk (cfun->is_thunk)
#define current_function_args_info (cfun->args_info)
#define current_function_args_size (cfun->args_size)
#define current_function_pretend_args_size (cfun->pretend_args_size)
#define current_function_outgoing_args_size (cfun->outgoing_args_size)
#define current_function_arg_offset_rtx (cfun->arg_offset_rtx)
#define current_function_stdarg (cfun->stdarg)
#define current_function_internal_arg_pointer (cfun->internal_arg_pointer)
#define current_function_return_rtx (cfun->return_rtx)
#define current_function_profile (cfun->profile)
#define current_function_funcdef_no (cfun->funcdef_no)
#define current_function_limit_stack (cfun->limit_stack)
#define current_function_uses_pic_offset_table (cfun->uses_pic_offset_table)
#define current_function_uses_const_pool (cfun->uses_const_pool)
#define current_function_epilogue_delay_list (cfun->epilogue_delay_list)
#define current_function_has_nonlocal_label (cfun->has_nonlocal_label)
#define current_function_has_nonlocal_goto (cfun->has_nonlocal_goto)

#define return_label (cfun->x_return_label)
#define naked_return_label (cfun->x_naked_return_label)
#define stack_slot_list (cfun->x_stack_slot_list)
#define parm_birth_insn (cfun->x_parm_birth_insn)
#define frame_offset (cfun->x_frame_offset)
#define stack_check_probe_note (cfun->x_stack_check_probe_note)
#define arg_pointer_save_area (cfun->x_arg_pointer_save_area)
#define used_temp_slots (cfun->x_used_temp_slots)
#define avail_temp_slots (cfun->x_avail_temp_slots)
#define temp_slot_level (cfun->x_temp_slot_level)
#define nonlocal_goto_handler_labels (cfun->x_nonlocal_goto_handler_labels)
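
/* Illustrative sketch (not part of this interface): used_temp_slots is a
   GC'd VEC indexed by temporary nesting level (see x_temp_slot_level above),
   so it is read with the vec.h accessors, e.g.:

     temp_slot_p slots_at_level
       = VEC_index (temp_slot_p, used_temp_slots, temp_slot_level);  */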

/* Given a function decl for a containing function,
   return the `struct function' for it.  */
struct function *find_function_data (tree);

/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
extern void reorder_blocks (void);

/* Set BLOCK_NUMBER for all the blocks in FN.  */
extern void number_blocks (tree);

extern void clear_block_marks (tree);
extern tree blocks_nreverse (tree);
extern void reset_block_changes (void);
extern void record_block_change (tree);
extern void finalize_block_changes (void);
extern void check_block_change (rtx, tree *);
extern void free_block_changes (void);

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */
extern HOST_WIDE_INT get_frame_size (void);
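
/* Illustrative sketch (not part of this interface): since get_frame_size
   does not round, a caller that needs a STACK_BOUNDARY-aligned size must
   round up itself, e.g.:

     HOST_WIDE_INT size = get_frame_size ();
     HOST_WIDE_INT align = STACK_BOUNDARY / BITS_PER_UNIT;
     size = (size + align - 1) & -align;  */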

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */
extern bool frame_offset_overflow (HOST_WIDE_INT, tree);

/* A pointer to a function to create target specific, per-function
   data structures.  */
extern struct machine_function * (*init_machine_status) (void);
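
/* Illustrative sketch (not part of this interface): a backend typically
   points init_machine_status at its own allocator during option processing
   so that each struct function gets a fresh, zeroed machine_function.
   The target prefix below is made up:

     static struct machine_function *
     foo_init_machine_status (void)
     {
       return ggc_alloc_cleared (sizeof (struct machine_function));
     }
     ...
     init_machine_status = foo_init_machine_status;  */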

/* Save and restore status information for a nested function.  */
extern void free_after_parsing (struct function *);
extern void free_after_compilation (struct function *);

extern void init_varasm_status (struct function *);

#ifdef RTX_CODE
extern void diddle_return_value (void (*)(rtx, void*), void*);
extern void clobber_return_register (void);
#endif

extern rtx get_arg_pointer_save_area (struct function *);

/* Returns the name of the current function.  */
extern const char *current_function_name (void);

extern void do_warn_unused_parameter (tree);

extern bool pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
			       tree, bool);
extern bool reference_callee_copied (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);

extern void used_types_insert (tree);

#endif  /* GCC_FUNCTION_H */