/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

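/* Illustrative sketch (an added example, not original text): for a C
   fragment such as

	if (cond) then_stmt;

   a front end would drive the expanders in this file roughly as

	expand_start_cond (cond_tree, 0);
	... expand then_stmt ...
	expand_end_cond ();

   The precise argument lists are those of the expand_* routines as
   declared in the headers; this is only a hedged outline of the
   calling protocol described above.  */
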
#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

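/* Illustrative sketch (an added example, not original text): for

	switch (x)
	  {
	  case 1: ...		(low == high == 1)
	  case 4 ... 6: ...	(GNU range: low == 4, high == 6)
	  default: ...
	  }

   each case label yields one case_node with LOW and HIGH as shown;
   the default label is kept separately, not as a case_node.  */
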
struct case_node GTY(())
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

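/* For example, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so indices -1 through 127
   fit in the 129-element table.  */
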
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

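/* Illustrative sketch (an added example, not original text): while
   expanding

	for (...)		-> one entry, also on loop_stack
	  if (...)		-> one entry, also on cond_stack
	    switch (...)	-> one entry, also on case_stack

   `nesting_stack' chains all three objects through their `all' fields,
   innermost first, and each object is simultaneously on the per-kind
   stack named above via its `next' field.  */
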
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups; it must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)

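/* Illustrative use of POPSTACK (a sketch of the protocol, not a call
   made at this point): a function such as `expand_end_loop' that
   finishes the innermost loop would do

	POPSTACK (loop_stack);

   popping nesting_stack entry by entry, and unlinking each popped
   entry from every per-kind stack it heads, until the loop's own
   entry has been removed.  */
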
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences		PARAMS ((int, const char *));
static bool parse_input_constraint	PARAMS ((const char **, int, int, int,
						 int, const char * const *,
						 bool *, bool *));
static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
					       rtx, int));
static bool check_operand_nalternatives	PARAMS ((tree, tree));
static bool check_unique_operand_names	PARAMS ((tree, tree));
static tree resolve_operand_names	PARAMS ((tree, tree, tree,
						 const char **));
static char *resolve_operand_name_1	PARAMS ((char *, tree, tree));
static void expand_null_return_1	PARAMS ((rtx));
static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list	PARAMS ((case_node *, case_node *));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function ()
{
  cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  clear_last_expr ();
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}


/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();

      do_pending_stack_adjust ();
      emit_label (cfun->computed_goto_common_label);
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}

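/* Illustrative input (an added example using the GNU C label-value
   extension, not original text):

	void *p = &&lab;
	goto *p;		(reaches expand_computed_goto)

   Note above that all computed gotos in a function share one common
   register and indirect-jump label, created on first use.  */
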
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	      break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

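/* Illustrative input (a hedged GNU C sketch, not original text):

	void f (void)
	{
	  __label__ out;
	  void g (void) { goto out; }	(nonlocal goto)
	  g ();
	 out:;
	}

   While expanding g, decl_function_context (label) is f, not
   current_function_decl, so expand_goto above takes the nonlocal
   branch and jumps through f's handler slot.  */
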
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		   == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

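/* Illustrative case needing a fixup (an added sketch, not original
   text): a forward goto out of a contour that must reset the stack,

	{
	  int vla[n];		(contour with a stack level)
	  ...
	  goto out;		(label not yet defined here)
	}
       out:;

   expand_goto_internal cannot emit the stack restore at the goto, so
   expand_fixup records it; fixup_gotos below finishes the job once
   the contour is exited and the label has been defined.  */
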
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.
   DONT_JUMP_IN nonzero means report an error if there is a jump into
   this contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such insns can be added by things like
	     put_var_into_stack and can have INSN_UIDs that are out of
	     the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (string, vol)
     tree string;
     int vol;
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}

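/* Illustrative input (an added example; the insn string is arbitrary):
   a basic asm with no operands such as

	asm volatile ("nop");

   arrives here with STRING holding "nop" and VOL nonzero, and is
   emitted as a single ASM_INPUT body.  */
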
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
			 allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else if (EXTRA_ADDRESS_CONSTRAINT (*p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

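/* For example (an added illustration): the output constraint "r=" is
   canonicalized above to "=r", with a warning that the `=' was not
   first; "+r" sets *IS_INOUT and likewise becomes "=r".  */
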
/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
			constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j++)
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      break;
	    }
	  else
	    j = end - constraint;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j]))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j]))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

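/* For example (an added illustration): given an output operand with
   constraint "=r", an input constrained "0" matches output 0, and the
   loop above substitutes the real constraint "=r", so the input is
   known to allow a register instead of being forced to memory.  */
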
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (decl, clobbered_regs)
     tree decl;
     const HARD_REG_SET clobbered_regs;
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }
  return false;
}

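/* Illustrative conflict (an added sketch; the register name is
   target-dependent and hypothetical):

	register int x asm ("r0");
	asm ("..." : "=r" (x) : : "r0");

   Here DECL_RTL (x) is the hard register r0, which is also in
   CLOBBERED_REGS, so the check above reports the conflict and
   returns true.  */
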
1462 /* Generate RTL for an asm statement with arguments.
1463    STRING is the instruction template.
1464    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1465    Each output or input has an expression in the TREE_VALUE and
1466    and a tree list in TREE_PURPOSE which in turn contains a constraint
1467    name in TREE_VALUE (or NULL_TREE) and a constraint string
1468    in TREE_PURPOSE.
1469    CLOBBERS is a list of STRING_CST nodes each naming a hard register
1470    that is clobbered by this insn.
1471 
1472    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1473    Some elements of OUTPUTS may be replaced with trees representing temporary
1474    values.  The caller should copy those temporary values to the originally
1475    specified lvalues.
1476 
1477    VOL nonzero means the insn is volatile; don't optimize it.  */
1478 
1479 void
expand_asm_operands(string,outputs,inputs,clobbers,vol,filename,line)1480 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1481      tree string, outputs, inputs, clobbers;
1482      int vol;
1483      const char *filename;
1484      int line;
1485 {
1486   rtvec argvec, constraintvec;
1487   rtx body;
1488   int ninputs = list_length (inputs);
1489   int noutputs = list_length (outputs);
1490   int ninout;
1491   int nclobbers;
1492   HARD_REG_SET clobbered_regs;
1493   int clobber_conflict_found = 0;
1494   tree tail;
1495   int i;
1496   /* Vector of RTX's of evaluated output operands.  */
1497   rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1498   int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1499   rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1500   enum machine_mode *inout_mode
1501     = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1502   const char **constraints
1503     = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1504   /* The insn we have emitted.  */
1505   rtx insn;
1506   int old_generating_concat_p = generating_concat_p;
1507 
1508   /* An ASM with no outputs needs to be treated as volatile, for now.  */
1509   if (noutputs == 0)
1510     vol = 1;
1511 
1512   if (! check_operand_nalternatives (outputs, inputs))
1513     return;
1514 
1515   if (! check_unique_operand_names (outputs, inputs))
1516     return;
1517 
1518   string = resolve_operand_names (string, outputs, inputs, constraints);
1519 
1520 #ifdef MD_ASM_CLOBBERS
1521   /* Sometimes we wish to automatically clobber registers across an asm.
1522      A case in point is the i386 backend's move from cc0 to a hard reg --
1523      maintaining source-level compatibility means automatically clobbering
1524      the flags register.  */
1525   MD_ASM_CLOBBERS (clobbers);
1526 #endif
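
  /* A target might define MD_ASM_CLOBBERS along these lines (a
     sketch only; the real i386 definition adds several registers):

       #define MD_ASM_CLOBBERS(CLOBBERS)				\
	 (CLOBBERS) = tree_cons (NULL_TREE,				\
				 build_string (5, "flags"),		\
				 (CLOBBERS))

     so that every asm implicitly clobbers the flags register.  */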
1527 
1528   /* Count the number of meaningful clobbered registers, ignoring what
1529      we would ignore later.  */
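  /* For reference, decode_reg_name's conventions as used below: it
     returns a hard register number (>= 0) on success, -1 for an
     empty specifier, -2 for an unrecognized name, -3 for the
     special name "cc", and -4 for the special name "memory".  */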
1530   nclobbers = 0;
1531   CLEAR_HARD_REG_SET (clobbered_regs);
1532   for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1533     {
1534       const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1535 
1536       i = decode_reg_name (regname);
1537       if (i >= 0 || i == -4)
1538 	++nclobbers;
1539       else if (i == -2)
1540 	error ("unknown register name `%s' in `asm'", regname);
1541 
1542       /* Mark clobbered registers.  */
1543       if (i >= 0)
1544 	SET_HARD_REG_BIT (clobbered_regs, i);
1545     }
1546 
1547   clear_last_expr ();
1548 
1549   /* First pass over inputs and outputs checks validity and sets
1550      mark_addressable if needed.  */
1551 
1552   ninout = 0;
1553   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1554     {
1555       tree val = TREE_VALUE (tail);
1556       tree type = TREE_TYPE (val);
1557       const char *constraint;
1558       bool is_inout;
1559       bool allows_reg;
1560       bool allows_mem;
1561 
1562       /* If there's an erroneous arg, emit no insn.  */
1563       if (type == error_mark_node)
1564 	return;
1565 
1566       /* Try to parse the output constraint.  If that fails, there's
1567 	 no point in going further.  */
1568       constraint = constraints[i];
1569       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1570 				    &allows_mem, &allows_reg, &is_inout))
1571 	return;
1572 
1573       if (! allows_reg
1574 	  && (allows_mem
1575 	      || is_inout
1576 	      || (DECL_P (val)
1577 		  && GET_CODE (DECL_RTL (val)) == REG
1578 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1579 	(*lang_hooks.mark_addressable) (val);
1580 
1581       if (is_inout)
1582 	ninout++;
1583     }
1584 
1585   ninputs += ninout;
1586   if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1587     {
1588       error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1589       return;
1590     }
1591 
1592   for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1593     {
1594       bool allows_reg, allows_mem;
1595       const char *constraint;
1596 
1597       /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1598 	 would get VOIDmode and that could cause a crash in reload.  */
1599       if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1600 	return;
1601 
1602       constraint = constraints[i + noutputs];
1603       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1604 				    constraints, &allows_mem, &allows_reg))
1605 	return;
1606 
1607       if (! allows_reg && allows_mem)
1608 	(*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1609     }
1610 
1611   /* Second pass evaluates arguments.  */
1612 
1613   ninout = 0;
1614   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1615     {
1616       tree val = TREE_VALUE (tail);
1617       tree type = TREE_TYPE (val);
1618       bool is_inout;
1619       bool allows_reg;
1620       bool allows_mem;
1621       rtx op;
1622 
1623       if (!parse_output_constraint (&constraints[i], i, ninputs,
1624 				    noutputs, &allows_mem, &allows_reg,
1625 				    &is_inout))
1626 	abort ();
1627 
1628       /* If an output operand is not a decl or indirect ref and our constraint
1629 	 allows a register, make a temporary to act as an intermediate.
1630 	 Make the asm insn write into that, then our caller will copy it to
1631 	 the real output operand.  Likewise for promoted variables.  */
1632 
1633       generating_concat_p = 0;
1634 
1635       real_output_rtx[i] = NULL_RTX;
1636       if ((TREE_CODE (val) == INDIRECT_REF
1637 	   && allows_mem)
1638 	  || (DECL_P (val)
1639 	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1640 	      && ! (GET_CODE (DECL_RTL (val)) == REG
1641 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1642 	  || ! allows_reg
1643 	  || is_inout)
1644 	{
1645 	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1646 	  if (GET_CODE (op) == MEM)
1647 	    op = validize_mem (op);
1648 
1649 	  if (! allows_reg && GET_CODE (op) != MEM)
1650 	    error ("output number %d not directly addressable", i);
1651 	  if ((! allows_mem && GET_CODE (op) == MEM)
1652 	      || GET_CODE (op) == CONCAT)
1653 	    {
1654 	      real_output_rtx[i] = protect_from_queue (op, 1);
1655 	      op = gen_reg_rtx (GET_MODE (op));
1656 	      if (is_inout)
1657 		emit_move_insn (op, real_output_rtx[i]);
1658 	    }
1659 	}
1660       else
1661 	{
1662 	  op = assign_temp (type, 0, 0, 1);
1663 	  op = validize_mem (op);
1664 	  TREE_VALUE (tail) = make_tree (type, op);
1665 	}
1666       output_rtx[i] = op;
1667 
1668       generating_concat_p = old_generating_concat_p;
1669 
1670       if (is_inout)
1671 	{
1672 	  inout_mode[ninout] = TYPE_MODE (type);
1673 	  inout_opnum[ninout++] = i;
1674 	}
1675 
1676       if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1677 	clobber_conflict_found = 1;
1678     }
1679 
1680   /* Make vectors for the expression-rtx, constraint strings,
1681      and named operands.  */
1682 
1683   argvec = rtvec_alloc (ninputs);
1684   constraintvec = rtvec_alloc (ninputs);
1685 
1686   body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1687 				: GET_MODE (output_rtx[0])),
1688 			       TREE_STRING_POINTER (string),
1689 			       empty_string, 0, argvec, constraintvec,
1690 			       filename, line);
1691 
1692   MEM_VOLATILE_P (body) = vol;
1693 
1694   /* Eval the inputs and put them into ARGVEC.
1695      Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
1696 
1697   for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1698     {
1699       bool allows_reg, allows_mem;
1700       const char *constraint;
1701       tree val, type;
1702       rtx op;
1703 
1704       constraint = constraints[i + noutputs];
1705       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1706 				    constraints, &allows_mem, &allows_reg))
1707 	abort ();
1708 
1709       generating_concat_p = 0;
1710 
1711       val = TREE_VALUE (tail);
1712       type = TREE_TYPE (val);
1713       op = expand_expr (val, NULL_RTX, VOIDmode,
1714 			(allows_mem && !allows_reg
1715 			 ? EXPAND_MEMORY : EXPAND_NORMAL));
1716 
1717       /* Never pass a CONCAT to an ASM.  */
1718       if (GET_CODE (op) == CONCAT)
1719 	op = force_reg (GET_MODE (op), op);
1720       else if (GET_CODE (op) == MEM)
1721 	op = validize_mem (op);
1722 
1723       if (asm_operand_ok (op, constraint) <= 0)
1724 	{
1725 	  if (allows_reg)
1726 	    op = force_reg (TYPE_MODE (type), op);
1727 	  else if (!allows_mem)
1728 	    warning ("asm operand %d probably doesn't match constraints",
1729 		     i + noutputs);
1730 	  else if (GET_CODE (op) == MEM)
1731 	    {
1732 	      /* We won't recognize either volatile memory or memory
1733 		 with a queued address as a valid memory_operand
1734 		 at this point.  Ignore it: clearly this *is* a memory.  */
1735 	    }
1736 	  else
1737 	    {
1738 	      warning ("use of memory input without lvalue in asm operand %d is deprecated",
1739 		       i + noutputs);
1740 
1741 	      if (CONSTANT_P (op))
1742 		{
1743 		  op = force_const_mem (TYPE_MODE (type), op);
1744 		  op = validize_mem (op);
1745 		}
1746 	      else if (GET_CODE (op) == REG
1747 		       || GET_CODE (op) == SUBREG
1748 		       || GET_CODE (op) == ADDRESSOF
1749 		       || GET_CODE (op) == CONCAT)
1750 		{
1751 		  tree qual_type = build_qualified_type (type,
1752 							 (TYPE_QUALS (type)
1753 							  | TYPE_QUAL_CONST));
1754 		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
1755 		  memloc = validize_mem (memloc);
1756 		  emit_move_insn (memloc, op);
1757 		  op = memloc;
1758 		}
1759 	    }
1760 	}
1761 
1762       generating_concat_p = old_generating_concat_p;
1763       ASM_OPERANDS_INPUT (body, i) = op;
1764 
1765       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1766 	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1767 
1768       if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1769 	clobber_conflict_found = 1;
1770     }
1771 
1772   /* Protect all the operands from the queue now that they have all been
1773      evaluated.  */
1774 
1775   generating_concat_p = 0;
1776 
1777   for (i = 0; i < ninputs - ninout; i++)
1778     ASM_OPERANDS_INPUT (body, i)
1779       = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1780 
1781   for (i = 0; i < noutputs; i++)
1782     output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1783 
1784   /* For in-out operands, copy output rtx to input rtx.  */
1785   for (i = 0; i < ninout; i++)
1786     {
1787       int j = inout_opnum[i];
1788       char buffer[16];
1789 
1790       ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1791 	= output_rtx[j];
1792 
1793       sprintf (buffer, "%d", j);
1794       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1795 	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1796     }
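
  /* For example (a sketch): `asm ("incl %0" : "+r" (i))' arrives as
     one output with constraint "+r" and no explicit inputs; the loop
     above materializes the hidden input and gives it the matching
     constraint "0", tying it to the same location as output 0.  */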
1797 
1798   generating_concat_p = old_generating_concat_p;
1799 
1800   /* Now, for each output, construct an rtx
1801      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1802 			       ARGVEC CONSTRAINTS OPNAMES))
1803      If there is more than one, put them inside a PARALLEL.  */
1804 
1805   if (noutputs == 1 && nclobbers == 0)
1806     {
1807       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1808       insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1809     }
1810 
1811   else if (noutputs == 0 && nclobbers == 0)
1812     {
1813       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
1814       insn = emit_insn (body);
1815     }
1816 
1817   else
1818     {
1819       rtx obody = body;
1820       int num = noutputs;
1821 
1822       if (num == 0)
1823 	num = 1;
1824 
1825       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1826 
1827       /* For each output operand, store a SET.  */
1828       for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1829 	{
1830 	  XVECEXP (body, 0, i)
1831 	    = gen_rtx_SET (VOIDmode,
1832 			   output_rtx[i],
1833 			   gen_rtx_ASM_OPERANDS
1834 			   (GET_MODE (output_rtx[i]),
1835 			    TREE_STRING_POINTER (string),
1836 			    constraints[i], i, argvec, constraintvec,
1837 			    filename, line));
1838 
1839 	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1840 	}
1841 
1842       /* If there are no outputs (but there are some clobbers)
1843 	 store the bare ASM_OPERANDS into the PARALLEL.  */
1844 
1845       if (i == 0)
1846 	XVECEXP (body, 0, i++) = obody;
1847 
1848       /* Store (clobber REG) for each clobbered register specified.  */
1849 
1850       for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1851 	{
1852 	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1853 	  int j = decode_reg_name (regname);
1854 	  rtx clobbered_reg;
1855 
1856 	  if (j < 0)
1857 	    {
1858 	      if (j == -3)	/* `cc', which is not a register */
1859 		continue;
1860 
1861 	      if (j == -4)	/* `memory', don't cache memory across asm */
1862 		{
1863 		  XVECEXP (body, 0, i++)
1864 		    = gen_rtx_CLOBBER (VOIDmode,
1865 				       gen_rtx_MEM
1866 				       (BLKmode,
1867 					gen_rtx_SCRATCH (VOIDmode)));
1868 		  continue;
1869 		}
1870 
1871 	      /* Ignore unknown register, error already signaled.  */
1872 	      continue;
1873 	    }
1874 
1875 	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1876 	  clobbered_reg = gen_rtx_REG (QImode, j);
1877 
1878 	  /* Sanity-check for overlap between the clobbers and the inputs
1879 	     and outputs that hasn't already been handled.  Such overlap
1880 	     should have been detected and reported above.  */
1881 	  if (!clobber_conflict_found)
1882 	    {
1883 	      int opno;
1884 
1885 	      /* We test the old body (obody) contents to avoid tripping
1886 		 over the under-construction body.  */
1887 	      for (opno = 0; opno < noutputs; opno++)
1888 		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1889 		  internal_error ("asm clobber conflict with output operand");
1890 
1891 	      for (opno = 0; opno < ninputs - ninout; opno++)
1892 		if (reg_overlap_mentioned_p (clobbered_reg,
1893 					     ASM_OPERANDS_INPUT (obody, opno)))
1894 		  internal_error ("asm clobber conflict with input operand");
1895 	    }
1896 
1897 	  XVECEXP (body, 0, i++)
1898 	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1899 	}
1900 
1901       insn = emit_insn (body);
1902     }
1903 
1904   /* For any outputs that needed reloading into registers, spill them
1905      back to where they belong.  */
1906   for (i = 0; i < noutputs; ++i)
1907     if (real_output_rtx[i])
1908       emit_move_insn (real_output_rtx[i], output_rtx[i]);
1909 
1910   free_temp_slots ();
1911 }
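
/* For a single-output asm with a register clobber, the body emitted
   above has roughly this shape (a sketch; register numbers are made
   up for illustration):

     (parallel [(set (reg:SI 60)
		     (asm_operands:SI ("incl %0") ("=r") 0 ...))
		(clobber (reg:QI 19))])
*/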
1912 
1913 /* A subroutine of expand_asm_operands.  Check that all operands have
1914    the same number of alternatives.  Return true if so.  */
1915 
1916 static bool
1917 check_operand_nalternatives (outputs, inputs)
1918      tree outputs, inputs;
1919 {
1920   if (outputs || inputs)
1921     {
1922       tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1923       int nalternatives
1924 	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1925       tree next = inputs;
1926 
1927       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1928 	{
1929 	  error ("too many alternatives in `asm'");
1930 	  return false;
1931 	}
1932 
1933       tmp = outputs;
1934       while (tmp)
1935 	{
1936 	  const char *constraint
1937 	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1938 
1939 	  if (n_occurrences (',', constraint) != nalternatives)
1940 	    {
1941 	      error ("operand constraints for `asm' differ in number of alternatives");
1942 	      return false;
1943 	    }
1944 
1945 	  if (TREE_CHAIN (tmp))
1946 	    tmp = TREE_CHAIN (tmp);
1947 	  else
1948 	    tmp = next, next = 0;
1949 	}
1950     }
1951 
1952   return true;
1953 }
1954 
1955 /* A subroutine of expand_asm_operands.  Check that all operand names
1956    are unique.  Return true if so.  We rely on the fact that these names
1957    are identifiers, and so have been canonicalized by get_identifier,
1958    so all we need are pointer comparisons.  */
1959 
1960 static bool
1961 check_unique_operand_names (outputs, inputs)
1962      tree outputs, inputs;
1963 {
1964   tree i, j;
1965 
1966   for (i = outputs; i ; i = TREE_CHAIN (i))
1967     {
1968       tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1969       if (! i_name)
1970 	continue;
1971 
1972       for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1973 	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1974 	  goto failure;
1975     }
1976 
1977   for (i = inputs; i ; i = TREE_CHAIN (i))
1978     {
1979       tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1980       if (! i_name)
1981 	continue;
1982 
1983       for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1984 	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1985 	  goto failure;
1986       for (j = outputs; j ; j = TREE_CHAIN (j))
1987 	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1988 	  goto failure;
1989     }
1990 
1991   return true;
1992 
1993  failure:
1994   error ("duplicate asm operand name '%s'",
1995 	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1996   return false;
1997 }
1998 
1999 /* A subroutine of expand_asm_operands.  Resolve the names of the operands
2000    in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2001    STRING and in the constraints to those numbers.  */
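
/* For example, given

     asm ("mov %[in],%[out]" : [out] "=r" (dst) : [in] "r" (src));

   the template is rewritten to "mov %1,%0" (outputs are numbered
   before inputs), and any [<name>] uses inside the input constraint
   strings are replaced by operand numbers the same way.  */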
2002 
2003 static tree
2004 resolve_operand_names (string, outputs, inputs, pconstraints)
2005      tree string;
2006      tree outputs, inputs;
2007      const char **pconstraints;
2008 {
2009   char *buffer = xstrdup (TREE_STRING_POINTER (string));
2010   char *p;
2011   tree t;
2012 
2013   /* Assume that we will not need extra space to perform the substitution.
2014      This is because we get to remove '[' and ']', which means we cannot have
2015      a problem until we have more than 999 operands.  */
2016 
2017   p = buffer;
2018   while ((p = strchr (p, '%')) != NULL)
2019     {
2020       if (p[1] == '[')
2021 	p += 1;
2022       else if (ISALPHA (p[1]) && p[2] == '[')
2023 	p += 2;
2024       else
2025 	{
2026 	  p += 1;
2027 	  continue;
2028 	}
2029 
2030       p = resolve_operand_name_1 (p, outputs, inputs);
2031     }
2032 
2033   string = build_string (strlen (buffer), buffer);
2034   free (buffer);
2035 
2036   /* Collect output constraints here because it's convenient.
2037      There should be no named operands here; this is verified
2038      in expand_asm_operands.  */
2039   for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2040     *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2041 
2042   /* Substitute [<name>] in input constraint strings.  */
2043   for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2044     {
2045       const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2046       if (strchr (c, '[') == NULL)
2047 	*pconstraints = c;
2048       else
2049 	{
2050 	  p = buffer = xstrdup (c);
2051 	  while ((p = strchr (p, '[')) != NULL)
2052 	    p = resolve_operand_name_1 (p, outputs, inputs);
2053 
2054 	  *pconstraints = ggc_alloc_string (buffer, -1);
2055 	  free (buffer);
2056 	}
2057     }
2058 
2059   return string;
2060 }
2061 
2062 /* A subroutine of resolve_operand_names.  P points to the '[' for a
2063    potential named operand of the form [<name>].  In place, replace
2064    the name and brackets with a number.  Return a pointer to the
2065    balance of the string after substitution.  */
2066 
2067 static char *
2068 resolve_operand_name_1 (p, outputs, inputs)
2069      char *p;
2070      tree outputs, inputs;
2071 {
2072   char *q;
2073   int op;
2074   tree t;
2075   size_t len;
2076 
2077   /* Collect the operand name.  */
2078   q = strchr (p, ']');
2079   if (!q)
2080     {
2081       error ("missing close brace for named operand");
2082       return strchr (p, '\0');
2083     }
2084   len = q - p - 1;
2085 
2086   /* Resolve the name to a number.  */
2087   for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2088     {
2089       tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2090       if (name)
2091 	{
2092 	  const char *c = TREE_STRING_POINTER (name);
2093 	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2094 	    goto found;
2095 	}
2096     }
2097   for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2098     {
2099       tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2100       if (name)
2101 	{
2102 	  const char *c = TREE_STRING_POINTER (name);
2103 	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2104 	    goto found;
2105 	}
2106     }
2107 
2108   *q = '\0';
2109   error ("undefined named operand '%s'", p + 1);
2110   op = 0;
2111  found:
2112 
2113   /* Replace the name with the number.  Unfortunately, not all libraries
2114      get the return value of sprintf correct, so search for the end of the
2115      generated string by hand.  */
2116   sprintf (p, "%d", op);
2117   p = strchr (p, '\0');
2118 
2119   /* Verify the assumption that no extra buffer space is needed.  */
2120   if (p > q)
2121     abort ();
2122 
2123   /* Shift the rest of the buffer down to fill the gap.  */
2124   memmove (p, q + 1, strlen (q + 1) + 1);
2125 
2126   return p;
2127 }
2128 
2129 /* Generate RTL to evaluate the expression EXP
2130    and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2131    Provided just for backward-compatibility.  expand_expr_stmt_value()
2132    should be used for new code.  */
2133 
2134 void
2135 expand_expr_stmt (exp)
2136      tree exp;
2137 {
2138   expand_expr_stmt_value (exp, -1, 1);
2139 }
2140 
2141 /* Generate RTL to evaluate the expression EXP.  WANT_VALUE tells
2142    whether to (1) save the value of the expression, (0) discard it or
2143    (-1) use expr_stmts_for_value to tell.  The use of -1 is
2144    deprecated, and retained only for backward compatibility.  */
2145 
2146 void
2147 expand_expr_stmt_value (exp, want_value, maybe_last)
2148      tree exp;
2149      int want_value, maybe_last;
2150 {
2151   rtx value;
2152   tree type;
2153 
2154   if (want_value == -1)
2155     want_value = expr_stmts_for_value != 0;
2156 
2157   /* If -W, warn about statements with no side effects,
2158      except for an explicit cast to void (e.g. for assert()), and
2159      except for the last statement in ({...}), where they may be useful.  */
2160   if (! want_value
2161       && (expr_stmts_for_value == 0 || ! maybe_last)
2162       && exp != error_mark_node)
2163     {
2164       if (! TREE_SIDE_EFFECTS (exp))
2165 	{
2166 	  if ((extra_warnings || warn_unused_value)
2167 	      && !(TREE_CODE (exp) == CONVERT_EXPR
2168 		   && VOID_TYPE_P (TREE_TYPE (exp))))
2169 	    warning_with_file_and_line (emit_filename, emit_lineno,
2170 				        "statement with no effect");
2171 	}
2172       else if (warn_unused_value)
2173 	warn_if_unused_value (exp);
2174     }
2175 
2176   /* If EXP is of function type and we are expanding statements for
2177      value, convert it to pointer-to-function.  */
2178   if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2179     exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2180 
2181   /* The call to `expand_expr' could cause last_expr_type and
2182      last_expr_value to get reset.  Therefore, we set last_expr_value
2183      and last_expr_type *after* calling expand_expr.  */
2184   value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2185 		       VOIDmode, 0);
2186   type = TREE_TYPE (exp);
2187 
2188   /* If all we do is reference a volatile value in memory,
2189      copy it to a register to be sure it is actually touched.  */
2190   if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2191     {
2192       if (TYPE_MODE (type) == VOIDmode)
2193 	;
2194       else if (TYPE_MODE (type) != BLKmode)
2195 	value = copy_to_reg (value);
2196       else
2197 	{
2198 	  rtx lab = gen_label_rtx ();
2199 
2200 	  /* Compare the value with itself to reference it.  */
2201 	  emit_cmp_and_jump_insns (value, value, EQ,
2202 				   expand_expr (TYPE_SIZE (type),
2203 						NULL_RTX, VOIDmode, 0),
2204 				   BLKmode, 0, lab);
2205 	  emit_label (lab);
2206 	}
2207     }
2208 
2209   /* If this expression is part of a ({...}) and is in memory, we may have
2210      to preserve temporaries.  */
2211   preserve_temp_slots (value);
2212 
2213   /* Free any temporaries used to evaluate this expression.  Any temporary
2214      used as a result of this expression will already have been preserved
2215      above.  */
2216   free_temp_slots ();
2217 
2218   if (want_value)
2219     {
2220       last_expr_value = value;
2221       last_expr_type = type;
2222     }
2223 
2224   emit_queue ();
2225 }
2226 
2227 /* Warn if EXP contains any computations whose results are not used.
2228    Return 1 if a warning is printed; 0 otherwise.  */
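
/* For example, in `f (), x + 1;' the value of `x + 1' is computed
   and dropped, so the COMPOUND_EXPR case below warns about it; by
   contrast `f (), 0' is deliberately let through, and a cast to
   void is suppressed by the VOID_TYPE_P check.  */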
2229 
2230 int
2231 warn_if_unused_value (exp)
2232      tree exp;
2233 {
2234   if (TREE_USED (exp))
2235     return 0;
2236 
2237   /* Don't warn about void constructs.  This includes casting to void,
2238      void function calls, and statement expressions with a final cast
2239      to void.  */
2240   if (VOID_TYPE_P (TREE_TYPE (exp)))
2241     return 0;
2242 
2243   switch (TREE_CODE (exp))
2244     {
2245     case PREINCREMENT_EXPR:
2246     case POSTINCREMENT_EXPR:
2247     case PREDECREMENT_EXPR:
2248     case POSTDECREMENT_EXPR:
2249     case MODIFY_EXPR:
2250     case INIT_EXPR:
2251     case TARGET_EXPR:
2252     case CALL_EXPR:
2253     case METHOD_CALL_EXPR:
2254     case RTL_EXPR:
2255     case TRY_CATCH_EXPR:
2256     case WITH_CLEANUP_EXPR:
2257     case EXIT_EXPR:
2258       return 0;
2259 
2260     case BIND_EXPR:
2261       /* For a binding, warn if no side effect within it.  */
2262       return warn_if_unused_value (TREE_OPERAND (exp, 1));
2263 
2264     case SAVE_EXPR:
2265       return warn_if_unused_value (TREE_OPERAND (exp, 0));
2266 
2267     case TRUTH_ORIF_EXPR:
2268     case TRUTH_ANDIF_EXPR:
2269       /* In && or ||, warn if 2nd operand has no side effect.  */
2270       return warn_if_unused_value (TREE_OPERAND (exp, 1));
2271 
2272     case COMPOUND_EXPR:
2273       if (TREE_NO_UNUSED_WARNING (exp))
2274 	return 0;
2275       if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2276 	return 1;
2277       /* Let people do `(foo (), 0)' without a warning.  */
2278       if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2279 	return 0;
2280       return warn_if_unused_value (TREE_OPERAND (exp, 1));
2281 
2282     case NOP_EXPR:
2283     case CONVERT_EXPR:
2284     case NON_LVALUE_EXPR:
2285       /* Don't warn about conversions not explicit in the user's program.  */
2286       if (TREE_NO_UNUSED_WARNING (exp))
2287 	return 0;
2288       /* Assignment to a cast usually results in a cast of a modify.
2289 	 Don't complain about that.  There can be an arbitrary number of
2290 	 casts before the modify, so we must loop until we find the first
2291 	 non-cast expression and then test to see if that is a modify.  */
2292       {
2293 	tree tem = TREE_OPERAND (exp, 0);
2294 
2295 	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2296 	  tem = TREE_OPERAND (tem, 0);
2297 
2298 	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2299 	    || TREE_CODE (tem) == CALL_EXPR)
2300 	  return 0;
2301       }
2302       goto maybe_warn;
2303 
2304     case INDIRECT_REF:
2305       /* Don't warn about automatic dereferencing of references, since
2306 	 the user cannot control it.  */
2307       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2308 	return warn_if_unused_value (TREE_OPERAND (exp, 0));
2309       /* Fall through.  */
2310 
2311     default:
2312       /* Referencing a volatile value is a side effect, so don't warn.  */
2313       if ((DECL_P (exp)
2314 	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2315 	  && TREE_THIS_VOLATILE (exp))
2316 	return 0;
2317 
2318       /* If this is an expression which has no operands, there is no value
2319 	 to be unused.  There are no such language-independent codes,
2320 	 but front ends may define such.  */
2321       if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2322 	  && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2323 	return 0;
2324 
2325     maybe_warn:
2326       /* If this is an expression with side effects, don't warn.  */
2327       if (TREE_SIDE_EFFECTS (exp))
2328 	return 0;
2329 
2330       warning_with_file_and_line (emit_filename, emit_lineno,
2331 				  "value computed is not used");
2332       return 1;
2333     }
2334 }
2335 
2336 /* Clear out the memory of the last expression evaluated.  */
2337 
2338 void
2339 clear_last_expr ()
2340 {
2341   last_expr_type = NULL_TREE;
2342   last_expr_value = NULL_RTX;
2343 }
2344 
2345 /* Begin a statement-expression, i.e., a series of statements which
2346    may return a value.  Return the RTL_EXPR for this statement expr.
2347    The caller must save that value and pass it to
2348    expand_end_stmt_expr.  If HAS_SCOPE is nonzero, temporaries created
2349    in the statement-expression are deallocated at the end of the
2350    expression.  */
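
/* A front end expands `({ int t = f (); t + 1; })' roughly as (a
   sketch, ignoring scopes and cleanups):

     tree stmt_expr = expand_start_stmt_expr (1);
     ... expand the enclosed statements, the last one for value ...
     stmt_expr = expand_end_stmt_expr (stmt_expr);

   after which RTL_EXPR_RTL (stmt_expr) holds the value of the last
   expression.  */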
2351 
2352 tree
2353 expand_start_stmt_expr (has_scope)
2354      int has_scope;
2355 {
2356   tree t;
2357 
2358   /* Make the RTL_EXPR node temporary, not momentary,
2359      so that rtl_expr_chain doesn't become garbage.  */
2360   t = make_node (RTL_EXPR);
2361   do_pending_stack_adjust ();
2362   if (has_scope)
2363     start_sequence_for_rtl_expr (t);
2364   else
2365     start_sequence ();
2366   NO_DEFER_POP;
2367   expr_stmts_for_value++;
2368   return t;
2369 }
2370 
2371 /* Restore the previous state at the end of a statement that returns a value.
2372    Returns a tree node representing the statement's value and the
2373    insns to compute the value.
2374 
2375    The nodes of that expression have been freed by now, so we cannot use them.
2376    But we don't want to do that anyway; the expression has already been
2377    evaluated and now we just want to use the value.  So generate an RTL_EXPR
2378    with the proper type and RTL value.
2379 
2380    If the last substatement was not an expression,
2381    return something with type `void'.  */
2382 
2383 tree
2384 expand_end_stmt_expr (t)
2385      tree t;
2386 {
2387   OK_DEFER_POP;
2388 
2389   if (! last_expr_value || ! last_expr_type)
2390     {
2391       last_expr_value = const0_rtx;
2392       last_expr_type = void_type_node;
2393     }
2394   else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2395     /* Remove any possible QUEUED.  */
2396     last_expr_value = protect_from_queue (last_expr_value, 0);
2397 
2398   emit_queue ();
2399 
2400   TREE_TYPE (t) = last_expr_type;
2401   RTL_EXPR_RTL (t) = last_expr_value;
2402   RTL_EXPR_SEQUENCE (t) = get_insns ();
2403 
2404   rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2405 
2406   end_sequence ();
2407 
2408   /* Don't consider deleting this expr or containing exprs at tree level.  */
2409   TREE_SIDE_EFFECTS (t) = 1;
2410   /* Propagate volatility of the actual RTL expr.  */
2411   TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2412 
2413   clear_last_expr ();
2414   expr_stmts_for_value--;
2415 
2416   return t;
2417 }
2418 
2419 /* Generate RTL for the start of an if-then.  COND is the expression
2420    whose truth should be tested.
2421 
2422    If EXITFLAG is nonzero, this conditional is visible to
2423    `exit_something'.  */
2424 
2425 void
2426 expand_start_cond (cond, exitflag)
2427      tree cond;
2428      int exitflag;
2429 {
2430   struct nesting *thiscond = ALLOC_NESTING ();
2431 
2432   /* Make an entry on cond_stack for the cond we are entering.  */
2433 
2434   thiscond->desc = COND_NESTING;
2435   thiscond->next = cond_stack;
2436   thiscond->all = nesting_stack;
2437   thiscond->depth = ++nesting_depth;
2438   thiscond->data.cond.next_label = gen_label_rtx ();
2439   /* Before we encounter an `else', we don't need a separate exit label
2440      unless there are supposed to be exit statements
2441      to exit this conditional.  */
2442   thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2443   thiscond->data.cond.endif_label = thiscond->exit_label;
2444   cond_stack = thiscond;
2445   nesting_stack = thiscond;
2446 
2447   do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2448 }
2449 
2450 /* Generate RTL between the then-clause and the elseif-clause
2451    of an if-then-elseif-....  */
2452 
2453 void
2454 expand_start_elseif (cond)
2455      tree cond;
2456 {
2457   if (cond_stack->data.cond.endif_label == 0)
2458     cond_stack->data.cond.endif_label = gen_label_rtx ();
2459   emit_jump (cond_stack->data.cond.endif_label);
2460   emit_label (cond_stack->data.cond.next_label);
2461   cond_stack->data.cond.next_label = gen_label_rtx ();
2462   do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2463 }
2464 
2465 /* Generate RTL between the then-clause and the else-clause
2466    of an if-then-else.  */
2467 
2468 void
2469 expand_start_else ()
2470 {
2471   if (cond_stack->data.cond.endif_label == 0)
2472     cond_stack->data.cond.endif_label = gen_label_rtx ();
2473 
2474   emit_jump (cond_stack->data.cond.endif_label);
2475   emit_label (cond_stack->data.cond.next_label);
2476   cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
2477 }
2478 
2479 /* After calling expand_start_else, turn this "else" into an "else if"
2480    by providing another condition.  */
2481 
2482 void
2483 expand_elseif (cond)
2484      tree cond;
2485 {
2486   cond_stack->data.cond.next_label = gen_label_rtx ();
2487   do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2488 }
2489 
2490 /* Generate RTL for the end of an if-then.
2491    Pop the record for it off of cond_stack.  */
2492 
2493 void
2494 expand_end_cond ()
2495 {
2496   struct nesting *thiscond = cond_stack;
2497 
2498   do_pending_stack_adjust ();
2499   if (thiscond->data.cond.next_label)
2500     emit_label (thiscond->data.cond.next_label);
2501   if (thiscond->data.cond.endif_label)
2502     emit_label (thiscond->data.cond.endif_label);
2503 
2504   POPSTACK (cond_stack);
2505   clear_last_expr ();
2506 }
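
/* Taken together, a front end expands

     if (a) s1; else if (b) s2; else s3;

   with a call sequence along these lines (a sketch):

     expand_start_cond (a, 0);
     ... expand s1 ...
     expand_start_elseif (b);
     ... expand s2 ...
     expand_start_else ();
     ... expand s3 ...
     expand_end_cond ();
*/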
2507 
2508 /* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
2509    loop should be exited by `exit_something'.  This is a loop for which
2510    `expand_continue' will jump to the top of the loop.
2511 
2512    Make an entry on loop_stack to record the labels associated with
2513    this loop.  */
2514 
2515 struct nesting *
2516 expand_start_loop (exit_flag)
2517      int exit_flag;
2518 {
2519   struct nesting *thisloop = ALLOC_NESTING ();
2520 
2521   /* Make an entry on loop_stack for the loop we are entering.  */
2522 
2523   thisloop->desc = LOOP_NESTING;
2524   thisloop->next = loop_stack;
2525   thisloop->all = nesting_stack;
2526   thisloop->depth = ++nesting_depth;
2527   thisloop->data.loop.start_label = gen_label_rtx ();
2528   thisloop->data.loop.end_label = gen_label_rtx ();
2529   thisloop->data.loop.alt_end_label = 0;
2530   thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2531   thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2532   loop_stack = thisloop;
2533   nesting_stack = thisloop;
2534 
2535   do_pending_stack_adjust ();
2536   emit_queue ();
2537   emit_note (NULL, NOTE_INSN_LOOP_BEG);
2538   emit_label (thisloop->data.loop.start_label);
2539 
2540   return thisloop;
2541 }
2542 
2543 /* Like expand_start_loop but for a loop where the continuation point
2544    (for expand_continue_loop) will be specified explicitly.  */
2545 
2546 struct nesting *
2547 expand_start_loop_continue_elsewhere (exit_flag)
2548      int exit_flag;
2549 {
2550   struct nesting *thisloop = expand_start_loop (exit_flag);
2551   loop_stack->data.loop.continue_label = gen_label_rtx ();
2552   return thisloop;
2553 }
2554 
2555 /* Begin a null, aka do { } while (0) "loop".  But since the contents
2556    of said loop can still contain a break, we must frob the loop nest.  */
2557 
2558 struct nesting *
2559 expand_start_null_loop ()
2560 {
2561   struct nesting *thisloop = ALLOC_NESTING ();
2562 
2563   /* Make an entry on loop_stack for the loop we are entering.  */
2564 
2565   thisloop->desc = LOOP_NESTING;
2566   thisloop->next = loop_stack;
2567   thisloop->all = nesting_stack;
2568   thisloop->depth = ++nesting_depth;
2569   thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2570   thisloop->data.loop.end_label = gen_label_rtx ();
2571   thisloop->data.loop.alt_end_label = NULL_RTX;
2572   thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2573   thisloop->exit_label = thisloop->data.loop.end_label;
2574   loop_stack = thisloop;
2575   nesting_stack = thisloop;
2576 
2577   return thisloop;
2578 }
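
/* Typical pairing (a sketch): for `do { ... break; ... } while (0)'
   a front end can emit

     expand_start_null_loop ();
     ... expand the body; a break becomes expand_exit_loop (0) ...
     expand_end_null_loop ();

   so no loop code is generated, yet the break still has an
   end_label to reach.  */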
2579 
2580 /* Specify the continuation point for a loop started with
2581    expand_start_loop_continue_elsewhere.
2582    Use this at the point in the code to which a continue statement
2583    should jump.  */
2584 
2585 void
2586 expand_loop_continue_here ()
2587 {
2588   do_pending_stack_adjust ();
2589   emit_note (NULL, NOTE_INSN_LOOP_CONT);
2590   emit_label (loop_stack->data.loop.continue_label);
2591 }
2592 
2593 /* Finish a loop.  Generate a jump back to the top and the loop-exit label.
2594    Pop the block off of loop_stack.  */
2595 
2596 void
2597 expand_end_loop ()
2598 {
2599   rtx start_label = loop_stack->data.loop.start_label;
2600   rtx etc_note;
2601   int eh_regions, debug_blocks;
2602 
2603   /* Mark the continue-point at the top of the loop if none elsewhere.  */
2604   if (start_label == loop_stack->data.loop.continue_label)
2605     emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2606 
2607   do_pending_stack_adjust ();
2608 
2609   /* If the loop starts with a loop exit, roll that to the end where
2610      it will optimize together with the jump back.
2611 
2612      If the loop presently looks like this (in pseudo-C):
2613 
2614 	LOOP_BEG
2615 	start_label:
2616 	  if (test) goto end_label;
2617 	LOOP_END_TOP_COND
2618 	  body;
2619 	  goto start_label;
2620 	end_label:
2621 
2622      transform it to look like:
2623 
2624 	LOOP_BEG
2625 	  goto start_label;
2626 	top_label:
2627 	  body;
2628 	start_label:
2629 	  if (test) goto end_label;
2630 	  goto top_label;
2631 	end_label:
2632 
2633      We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2634      the end of the entry conditional.  Without this, our lexical scan
2635      can't tell the difference between an entry conditional and a
2636      body conditional that exits the loop.  Mistaking the two means
2637      that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2638      screw up loop unrolling.
2639 
2640      Things will be oh so much better when loop optimization is done
2641      off of a proper control flow graph...  */
2642 
2643   /* Scan insns from the top of the loop looking for the END_TOP_COND note.  */
2644 
2645   eh_regions = debug_blocks = 0;
2646   for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2647     if (GET_CODE (etc_note) == NOTE)
2648       {
2649 	if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2650 	  break;
2651 
2652 	/* We must not walk into a nested loop.  */
2653 	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2654 	  {
2655 	    etc_note = NULL_RTX;
2656 	    break;
2657 	  }
2658 
2659 	/* At the same time, scan for EH region notes, as we don't want
2660 	   to scrog region nesting.  This shouldn't happen, but...  */
2661 	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2662 	  eh_regions++;
2663 	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2664 	  {
2665 	    if (--eh_regions < 0)
2666 	      /* We've come to the end of an EH region, but never saw the
2667 		 beginning of that region.  That means that an EH region
2668 		 begins before the top of the loop, and ends in the middle
2669 		 of it.  The existence of such a situation violates a basic
2670 		 assumption in this code, since that would imply that even
2671 		 when EH_REGIONS is zero, we might move code out of an
2672 		 exception region.  */
2673 	      abort ();
2674 	  }
2675 
2676 	/* Likewise for debug scopes.  In this case we'll either (1) move
2677 	   all of the notes if they are properly nested or (2) leave the
2678 	   notes alone and only rotate the loop at high optimization
2679 	   levels when we expect to scrog debug info.  */
2680 	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2681 	  debug_blocks++;
2682 	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2683 	  debug_blocks--;
2684       }
2685 
2686   if (etc_note
2687       && optimize
2688       && eh_regions == 0
2689       && (debug_blocks == 0 || optimize >= 2)
2690       && NEXT_INSN (etc_note) != NULL_RTX
2691       && ! any_condjump_p (get_last_insn ()))
2692     {
2693       /* We found one.  Move everything from START to ETC to the end
2694 	 of the loop, and add a jump from the top of the loop.  */
2695       rtx top_label = gen_label_rtx ();
2696       rtx start_move = start_label;
2697 
2698       /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2699 	 then we want to move this note also.  */
2700       if (GET_CODE (PREV_INSN (start_move)) == NOTE
2701 	  && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2702 	start_move = PREV_INSN (start_move);
2703 
2704       emit_label_before (top_label, start_move);
2705 
2706       /* Actually move the insns.  If the debug scopes are nested, we
2707 	 can move everything at once.  Otherwise we have to move them
2708 	 one by one and squeeze out the block notes.  */
2709       if (debug_blocks == 0)
2710 	reorder_insns (start_move, etc_note, get_last_insn ());
2711       else
2712 	{
2713 	  rtx insn, next_insn;
2714 	  for (insn = start_move; insn; insn = next_insn)
2715 	    {
2716 	      /* Figure out which insn comes after this one.  We have
2717 		 to do this before we move INSN.  */
2718 	      next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2719 
2720 	      if (GET_CODE (insn) == NOTE
2721 		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2722 		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2723 		continue;
2724 
2725 	      reorder_insns (insn, insn, get_last_insn ());
2726 	    }
2727 	}
2728 
2729       /* Add the jump from the top of the loop.  */
2730       emit_jump_insn_before (gen_jump (start_label), top_label);
2731       emit_barrier_before (top_label);
2732       start_label = top_label;
2733     }
2734 
2735   emit_jump (start_label);
2736   emit_note (NULL, NOTE_INSN_LOOP_END);
2737   emit_label (loop_stack->data.loop.end_label);
2738 
2739   POPSTACK (loop_stack);
2740 
2741   clear_last_expr ();
2742 }
2743 
2744 /* Finish a null loop, aka do { } while (0).  */
2745 
2746 void
2747 expand_end_null_loop ()
2748 {
2749   do_pending_stack_adjust ();
2750   emit_label (loop_stack->data.loop.end_label);
2751 
2752   POPSTACK (loop_stack);
2753 
2754   clear_last_expr ();
2755 }
2756 
2757 /* Generate a jump to the current loop's continue-point.
2758    This is usually the top of the loop, but may be specified
2759    explicitly elsewhere.  If not currently inside a loop,
2760    return 0 and do nothing; caller will print an error message.  */
2761 
2762 int
2763 expand_continue_loop (whichloop)
2764      struct nesting *whichloop;
2765 {
2766   /* Emit information for branch prediction.  */
2767   rtx note;
2768 
2769   if (flag_guess_branch_prob)
2770     {
2771       note = emit_note (NULL, NOTE_INSN_PREDICTION);
2772       NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2773     }
2774   clear_last_expr ();
2775   if (whichloop == 0)
2776     whichloop = loop_stack;
2777   if (whichloop == 0)
2778     return 0;
2779   expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2780 			NULL_RTX);
2781   return 1;
2782 }
2783 
2784 /* Generate a jump to exit the current loop.  If not currently inside a loop,
2785    return 0 and do nothing; caller will print an error message.  */
2786 
2787 int
2788 expand_exit_loop (whichloop)
2789      struct nesting *whichloop;
2790 {
2791   clear_last_expr ();
2792   if (whichloop == 0)
2793     whichloop = loop_stack;
2794   if (whichloop == 0)
2795     return 0;
2796   expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2797   return 1;
2798 }
2799 
2800 /* Generate a conditional jump to exit the current loop if COND
2801    evaluates to zero.  If not currently inside a loop,
2802    return 0 and do nothing; caller will print an error message.  */
2803 
2804 int
2805 expand_exit_loop_if_false (whichloop, cond)
2806      struct nesting *whichloop;
2807      tree cond;
2808 {
2809   rtx label = gen_label_rtx ();
2810   rtx last_insn;
2811   clear_last_expr ();
2812 
2813   if (whichloop == 0)
2814     whichloop = loop_stack;
2815   if (whichloop == 0)
2816     return 0;
2817   /* In order to handle fixups, we actually create a conditional jump
2818      around an unconditional branch to exit the loop.  If fixups are
2819      necessary, they go before the unconditional branch.  */
2820 
2821   do_jump (cond, NULL_RTX, label);
2822   last_insn = get_last_insn ();
2823   if (GET_CODE (last_insn) == CODE_LABEL)
2824     whichloop->data.loop.alt_end_label = last_insn;
2825   expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2826 			NULL_RTX);
2827   emit_label (label);
2828 
2829   return 1;
2830 }
2831 
2832 /* Like expand_exit_loop_if_false except also emit a note marking
2833    the end of the conditional.  Should only be used immediately
2834    after expand_start_loop.  */
2835 
2836 int
2837 expand_exit_loop_top_cond (whichloop, cond)
2838      struct nesting *whichloop;
2839      tree cond;
2840 {
2841   if (! expand_exit_loop_if_false (whichloop, cond))
2842     return 0;
2843 
2844   emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2845   return 1;
2846 }
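
/* So a front end can expand `while (cond) body;' along these lines
   (a sketch; real front ends also deal with break/continue
   bookkeeping):

     expand_start_loop (1);
     expand_exit_loop_top_cond (0, cond);
     ... expand body ...
     expand_end_loop ();
*/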
2847 
2848 /* Return nonzero if the loop nest is empty.  Else return zero.  */
2849 
2850 int
2851 stmt_loop_nest_empty ()
2852 {
2853   /* cfun->stmt can be NULL if we are building a call to get the
2854      EH context for a setjmp/longjmp EH target and the current
2855      function was a deferred inline function.  */
2856   return (cfun->stmt == NULL || loop_stack == NULL);
2857 }
2858 
2859 /* Return nonzero if we should preserve sub-expressions as separate
2860    pseudos.  We never do so if we aren't optimizing.  We always do so
2861    if -fexpensive-optimizations.
2862 
2863    Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
2864    the loop may still be a small one.  */
2865 
2866 int
2867 preserve_subexpressions_p ()
2868 {
2869   rtx insn;
2870 
2871   if (flag_expensive_optimizations)
2872     return 1;
2873 
2874   if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2875     return 0;
2876 
2877   insn = get_last_insn_anywhere ();
2878 
2879   return (insn
2880 	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2881 	      < n_non_fixed_regs * 3));
2882 
2883 }
2884 
2885 /* Generate a jump to exit the current loop, conditional, binding contour
2886    or case statement.  Not all such constructs are visible to this function,
2887    only those started with EXIT_FLAG nonzero.  Individual languages use
2888    the EXIT_FLAG parameter to control which kinds of constructs you can
2889    exit this way.
2890 
2891    If not currently inside anything that can be exited,
2892    return 0 and do nothing; caller will print an error message.  */
2893 
2894 int
2895 expand_exit_something ()
2896 {
2897   struct nesting *n;
2898   clear_last_expr ();
2899   for (n = nesting_stack; n; n = n->all)
2900     if (n->exit_label != 0)
2901       {
2902 	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2903 	return 1;
2904       }
2905 
2906   return 0;
2907 }
2908 
2909 /* Generate RTL to return from the current function, with no value.
2910    (That is, we do not do anything about returning any value.)  */
2911 
2912 void
2913 expand_null_return ()
2914 {
2915   rtx last_insn;
2916 
2917   last_insn = get_last_insn ();
2918 
2919   /* If this function was declared to return a value, but we
2920      didn't, clobber the return registers so that they are not
2921      propagated live to the rest of the function.  */
2922   clobber_return_register ();
2923 
2924   expand_null_return_1 (last_insn);
2925 }
2926 
2927 /* Try to guess whether the returned value indicates an error code.  */
2928 static enum br_predictor
2929 return_prediction (val)
2930      rtx val;
2931 {
2932   /* Different heuristics for pointers and scalars.  */
2933   if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2934     {
2935       /* NULL is usually not returned.  */
2936       if (val == const0_rtx)
2937 	return PRED_NULL_RETURN;
2938     }
2939   else
2940     {
2941       /* Negative return values are often used to indicate
2942          errors.  */
2943       if (GET_CODE (val) == CONST_INT
2944 	  && INTVAL (val) < 0)
2945 	return PRED_NEGATIVE_RETURN;
2946       /* Constant return values are also usually errors;
2947          zero and one often mean booleans, so exclude them from the
2948 	 heuristics.  */
2949       if (CONSTANT_P (val)
2950 	  && (val != const0_rtx && val != const1_rtx))
2951 	return PRED_CONST_RETURN;
2952     }
2953   return PRED_NO_PREDICTION;
2954 }
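
/* For example, `return NULL;' from a pointer-valued function yields
   PRED_NULL_RETURN and `return -1;' yields PRED_NEGATIVE_RETURN;
   expand_value_return below records such a prediction as not
   taken.  */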
2955 
2956 /* Generate RTL to return from the current function, with value VAL.  */
2957 
2958 static void
2959 expand_value_return (val)
2960      rtx val;
2961 {
2962   rtx last_insn;
2963   rtx return_reg;
2964   enum br_predictor pred;
2965 
2966   if (flag_guess_branch_prob
2967       && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2968     {
2969       /* Emit information for branch prediction.  */
2970       rtx note;
2971 
2972       note = emit_note (NULL, NOTE_INSN_PREDICTION);
2973 
2974       NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2975 
2976     }
2977 
2978   last_insn = get_last_insn ();
2979   return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2980 
2981   /* Copy the value to the return location
2982      unless it's already there.  */
2983 
2984   if (return_reg != val)
2985     {
2986       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2987 #ifdef PROMOTE_FUNCTION_RETURN
2988       int unsignedp = TREE_UNSIGNED (type);
2989       enum machine_mode old_mode
2990 	= DECL_MODE (DECL_RESULT (current_function_decl));
2991       enum machine_mode mode
2992 	= promote_mode (type, old_mode, &unsignedp, 1);
2993 
2994       if (mode != old_mode)
2995 	val = convert_modes (mode, old_mode, val, unsignedp);
2996 #endif
2997       if (GET_CODE (return_reg) == PARALLEL)
2998 	emit_group_load (return_reg, val, int_size_in_bytes (type));
2999       else
3000 	emit_move_insn (return_reg, val);
3001     }
3002 
3003   expand_null_return_1 (last_insn);
3004 }
3005 
3006 /* Output a return with no value.  If LAST_INSN is nonzero,
3007    pretend that the return takes place after LAST_INSN.  */
3008 
3009 static void
3010 expand_null_return_1 (last_insn)
3011      rtx last_insn;
3012 {
3013   rtx end_label = cleanup_label ? cleanup_label : return_label;
3014 
3015   clear_pending_stack_adjust ();
3016   do_pending_stack_adjust ();
3017   clear_last_expr ();
3018 
3019   if (end_label == 0)
3020      end_label = return_label = gen_label_rtx ();
3021   expand_goto_internal (NULL_TREE, end_label, last_insn);
3022 }
3023 
3024 /* Generate RTL to evaluate the expression RETVAL and return it
3025    from the current function.  */
3026 
3027 void
3028 expand_return (retval)
3029      tree retval;
3030 {
3031   /* If there are any cleanups to be performed, then they will
3032      be inserted following LAST_INSN.  It is desirable
3033      that the last_insn, for such purposes, should be the
3034      last insn before computing the return value.  Otherwise, cleanups
3035      which call functions can clobber the return value.  */
3036   /* ??? rms: I think that is erroneous, because in C++ it would
3037      run destructors on variables that might be used in the subsequent
3038      computation of the return value.  */
3039   rtx last_insn = 0;
3040   rtx result_rtl;
3041   rtx val = 0;
3042   tree retval_rhs;
3043 
3044   /* If function wants no value, give it none.  */
3045   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3046     {
3047       expand_expr (retval, NULL_RTX, VOIDmode, 0);
3048       emit_queue ();
3049       expand_null_return ();
3050       return;
3051     }
3052 
3053   if (retval == error_mark_node)
3054     {
3055       /* Treat this like a return of no value from a function that
3056 	 returns a value.  */
3057       expand_null_return ();
3058       return;
3059     }
3060   else if (TREE_CODE (retval) == RESULT_DECL)
3061     retval_rhs = retval;
3062   else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3063 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3064     retval_rhs = TREE_OPERAND (retval, 1);
3065   else if (VOID_TYPE_P (TREE_TYPE (retval)))
3066     /* Recognize tail-recursive call to void function.  */
3067     retval_rhs = retval;
3068   else
3069     retval_rhs = NULL_TREE;
3070 
3071   last_insn = get_last_insn ();
3072 
3073   /* Distribute return down conditional expr if either of the sides
3074      may involve tail recursion (see test below).  This increases the number
3075      of tail recursions we see.  Don't always do this, since it can produce
3076      sub-optimal code in some cases and we distribute assignments into
3077      conditional expressions when it would help.  */
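
  /* For example, `return cond ? f (x) : y;' is rewritten below as if
     it had been `if (cond) return f (x); else return y;', letting
     each arm be considered for tail recursion on its own.  */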
3078 
3079   if (optimize && retval_rhs != 0
3080       && frame_offset == 0
3081       && TREE_CODE (retval_rhs) == COND_EXPR
3082       && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3083 	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3084     {
3085       rtx label = gen_label_rtx ();
3086       tree expr;
3087 
3088       do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3089       start_cleanup_deferral ();
3090       expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3091 		    DECL_RESULT (current_function_decl),
3092 		    TREE_OPERAND (retval_rhs, 1));
3093       TREE_SIDE_EFFECTS (expr) = 1;
3094       expand_return (expr);
3095       emit_label (label);
3096 
3097       expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3098 		    DECL_RESULT (current_function_decl),
3099 		    TREE_OPERAND (retval_rhs, 2));
3100       TREE_SIDE_EFFECTS (expr) = 1;
3101       expand_return (expr);
3102       end_cleanup_deferral ();
3103       return;
3104     }
3105 
3106   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3107 
3108   /* If the result is an aggregate that is being returned in one (or more)
3109      registers, load the registers here.  The compiler currently can't handle
3110      copying a BLKmode value into registers.  We could put this code in a
3111      more general area (for use by everyone instead of just function
3112      call/return), but until this feature is generally usable it is kept here
3113      (and in expand_call).  The value must go into a pseudo in case there
3114      are cleanups that will clobber the real return register.  */
3115 
3116   if (retval_rhs != 0
3117       && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3118       && GET_CODE (result_rtl) == REG)
3119     {
3120       int i;
3121       unsigned HOST_WIDE_INT bitpos, xbitpos;
3122       unsigned HOST_WIDE_INT big_endian_correction = 0;
3123       unsigned HOST_WIDE_INT bytes
3124 	= int_size_in_bytes (TREE_TYPE (retval_rhs));
3125       int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3126       unsigned int bitsize
3127 	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3128       rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3129       rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3130       rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3131       enum machine_mode tmpmode, result_reg_mode;
3132 
3133       if (bytes == 0)
3134 	{
3135 	  expand_null_return ();
3136 	  return;
3137 	}
3138 
3139       /* Structures whose size is not a multiple of a word are aligned
3140 	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
3141 	 machine, this means we must skip the empty high order bytes when
3142 	 calculating the bit offset.  */
3143       if (BYTES_BIG_ENDIAN
3144 	  && bytes % UNITS_PER_WORD)
3145 	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3146 						  * BITS_PER_UNIT));
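      /* Worked example (editor's note, assuming BITS_PER_WORD == 32 and
	 BITS_PER_UNIT == 8 on a big-endian target): for a 6-byte
	 structure, bytes % UNITS_PER_WORD == 2, so the correction is
	 32 - 2 * 8 == 16 bits; the high-order 16 bits of the first
	 destination word stay zero and the data is right-justified.  */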
3147 
3148       /* Copy the structure BITSIZE bits at a time.  */
3149       for (bitpos = 0, xbitpos = big_endian_correction;
3150 	   bitpos < bytes * BITS_PER_UNIT;
3151 	   bitpos += bitsize, xbitpos += bitsize)
3152 	{
3153 	  /* We need a new destination pseudo each time xbitpos is
3154 	     on a word boundary, or when xbitpos == big_endian_correction
3155 	     (the first time through).  */
3156 	  if (xbitpos % BITS_PER_WORD == 0
3157 	      || xbitpos == big_endian_correction)
3158 	    {
3159 	      /* Generate an appropriate register.  */
3160 	      dst = gen_reg_rtx (word_mode);
3161 	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3162 
3163 	      /* Clear the destination before we move anything into it.  */
3164 	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3165 	    }
3166 
3167 	  /* We need a new source operand each time bitpos is on a word
3168 	     boundary.  */
3169 	  if (bitpos % BITS_PER_WORD == 0)
3170 	    src = operand_subword_force (result_val,
3171 					 bitpos / BITS_PER_WORD,
3172 					 BLKmode);
3173 
3174 	  /* Use bitpos for the source extraction (left justified) and
3175 	     xbitpos for the destination store (right justified).  */
3176 	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3177 			   extract_bit_field (src, bitsize,
3178 					      bitpos % BITS_PER_WORD, 1,
3179 					      NULL_RTX, word_mode, word_mode,
3180 					      BITS_PER_WORD),
3181 			   BITS_PER_WORD);
3182 	}
3183 
3184       /* Find the smallest integer mode large enough to hold the
3185 	 entire structure and use that mode instead of BLKmode
3186 	 on the USE insn for the return register.  */
3187       for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3188 	   tmpmode != VOIDmode;
3189 	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3190 	/* Have we found a large enough mode?  */
3191 	if (GET_MODE_SIZE (tmpmode) >= bytes)
3192 	  break;
3193 
3194       /* No suitable mode found.  */
3195       if (tmpmode == VOIDmode)
3196 	abort ();
3197 
3198       PUT_MODE (result_rtl, tmpmode);
3199 
3200       if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3201 	result_reg_mode = word_mode;
3202       else
3203 	result_reg_mode = tmpmode;
3204       result_reg = gen_reg_rtx (result_reg_mode);
3205 
3206       emit_queue ();
3207       for (i = 0; i < n_regs; i++)
3208 	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3209 			result_pseudos[i]);
3210 
3211       if (tmpmode != result_reg_mode)
3212 	result_reg = gen_lowpart (tmpmode, result_reg);
3213 
3214       expand_value_return (result_reg);
3215     }
3216   else if (retval_rhs != 0
3217 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3218 	   && (GET_CODE (result_rtl) == REG
3219 	       || (GET_CODE (result_rtl) == PARALLEL)))
3220     {
3221       /* Calculate the return value into a temporary (usually a pseudo
3222          reg).  */
3223       tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3224       tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3225 
3226       val = assign_temp (nt, 0, 0, 1);
3227       val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3228       val = force_not_mem (val);
3229       emit_queue ();
3230       /* Return the calculated value, doing cleanups first.  */
3231       expand_value_return (val);
3232     }
3233   else
3234     {
3235       /* No cleanups or no hard reg used;
3236 	 calculate value into hard return reg.  */
3237       expand_expr (retval, const0_rtx, VOIDmode, 0);
3238       emit_queue ();
3239       expand_value_return (result_rtl);
3240     }
3241 }
3242 
3243 /* Return 1 if the end of the generated RTX is not a barrier.
3244    This means code already compiled can drop through.  */
3245 
3246 int
3247 drop_through_at_end_p ()
3248 {
3249   rtx insn = get_last_insn ();
3250   while (insn && GET_CODE (insn) == NOTE)
3251     insn = PREV_INSN (insn);
3252   return insn && GET_CODE (insn) != BARRIER;
3253 }
3254 
3255 /* Attempt to optimize a potential tail recursion call into a goto.
3256    ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3257    where to place the jump to the tail recursion label.
3258 
3259    Return TRUE if the call was optimized into a goto.  */
3260 
3261 int
3262 optimize_tail_recursion (arguments, last_insn)
3263      tree arguments;
3264      rtx last_insn;
3265 {
3266   /* Finish checking validity, and if valid emit code to set the
3267      argument variables for the new call.  */
3268   if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3269     {
3270       if (tail_recursion_label == 0)
3271 	{
3272 	  tail_recursion_label = gen_label_rtx ();
3273 	  emit_label_after (tail_recursion_label,
3274 			    tail_recursion_reentry);
3275 	}
3276       emit_queue ();
3277       expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3278       emit_barrier ();
3279       return 1;
3280     }
3281   return 0;
3282 }
3283 
3284 /* Emit code to alter this function's formal parms for a tail-recursive call.
3285    ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3286    FORMALS is the chain of decls of formals.
3287    Return 1 if this can be done;
3288    otherwise return 0 and do not emit any code.  */
3289 
3290 static int
3291 tail_recursion_args (actuals, formals)
3292      tree actuals, formals;
3293 {
3294   tree a = actuals, f = formals;
3295   int i;
3296   rtx *argvec;
3297 
3298   /* Check that number and types of actuals are compatible
3299      with the formals.  This is not always true in valid C code.
3300      Also check that no formal needs to be addressable
3301      and that all formals are scalars.  */
3302 
3303   /* Also count the args.  */
3304 
3305   for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3306     {
3307       if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3308 	  != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3309 	return 0;
3310       if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3311 	return 0;
3312     }
3313   if (a != 0 || f != 0)
3314     return 0;
3315 
3316   /* Compute all the actuals.  */
3317 
3318   argvec = (rtx *) alloca (i * sizeof (rtx));
3319 
3320   for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3321     argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3322 
3323   /* Find which actual values refer to current values of previous formals.
3324      Copy each of them now, before any formal is changed.  */
3325 
3326   for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3327     {
3328       int copy = 0;
3329       int j;
3330       for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3331 	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3332 	  {
3333 	    copy = 1;
3334 	    break;
3335 	  }
3336       if (copy)
3337 	argvec[i] = copy_to_reg (argvec[i]);
3338     }
3339 
3340   /* Store the values of the actuals into the formals.  */
3341 
3342   for (f = formals, a = actuals, i = 0; f;
3343        f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3344     {
3345       if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3346 	emit_move_insn (DECL_RTL (f), argvec[i]);
3347       else
3348 	{
3349 	  rtx tmp = argvec[i];
3350 	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3351 	  promote_mode (TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3352 		       &unsignedp, 0);
3353 	  if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3354 	    {
3355 	      tmp = gen_reg_rtx (DECL_MODE (f));
3356 	      convert_move (tmp, argvec[i], unsignedp);
3357 	    }
3358 	  convert_move (DECL_RTL (f), tmp, unsignedp);
3359 	}
3360     }
3361 
3362   free_temp_slots ();
3363   return 1;
3364 }
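/* Editor's note (illustrative sketch, not in the original source): at
   the C level, a tail-recursive call such as

	return sum (n - 1, acc + n);

   is rewritten by the two functions above into roughly

	t1 = n - 1;
	t2 = acc + n;
	n = t1;
	acc = t2;
	goto tail_recursion_label;

   Every actual is computed before any formal is stored; the extra
   copy_to_reg in tail_recursion_args handles the case where a computed
   actual's RTL still mentions the register of a formal that is stored
   earlier in the list.  */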
3365 
3366 /* Generate the RTL code for entering a binding contour.
3367    The variables are declared one by one, by calls to `expand_decl'.
3368 
3369    FLAGS is a bitwise or of the following flags:
3370 
3371      1 - Nonzero if this construct should be visible to
3372          `exit_something'.
3373 
3374      2 - Nonzero if this contour does not require a
3375 	 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
3376 	 language-independent code should set this flag because they
3377 	 will not create corresponding BLOCK nodes.  (There should be
3378 	 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3379 	 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
3380 	 when expand_end_bindings is called.
3381 
3382     If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3383     optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
3384     note.  */
3385 
3386 void
3387 expand_start_bindings_and_block (flags, block)
3388      int flags;
3389      tree block;
3390 {
3391   struct nesting *thisblock = ALLOC_NESTING ();
3392   rtx note;
3393   int exit_flag = ((flags & 1) != 0);
3394   int block_flag = ((flags & 2) == 0);
3395 
3396   /* If a BLOCK is supplied, then the caller should be requesting a
3397      NOTE_INSN_BLOCK_BEG note.  */
3398   if (!block_flag && block)
3399     abort ();
3400 
3401   /* Create a note to mark the beginning of the block.  */
3402   if (block_flag)
3403     {
3404       note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3405       NOTE_BLOCK (note) = block;
3406     }
3407   else
3408     note = emit_note (NULL, NOTE_INSN_DELETED);
3409 
3410   /* Make an entry on block_stack for the block we are entering.  */
3411 
3412   thisblock->desc = BLOCK_NESTING;
3413   thisblock->next = block_stack;
3414   thisblock->all = nesting_stack;
3415   thisblock->depth = ++nesting_depth;
3416   thisblock->data.block.stack_level = 0;
3417   thisblock->data.block.cleanups = 0;
3418   thisblock->data.block.n_function_calls = 0;
3419   thisblock->data.block.exception_region = 0;
3420   thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3421 
3422   thisblock->data.block.conditional_code = 0;
3423   thisblock->data.block.last_unconditional_cleanup = note;
3424   /* When we insert instructions after the last unconditional cleanup,
3425      we don't adjust last_insn.  That means that a later add_insn will
3426      clobber the instructions we've just added.  The easiest way to
3427      fix this is to just insert another instruction here, so that the
3428      instructions inserted after the last unconditional cleanup are
3429      never the last instruction.  */
3430   emit_note (NULL, NOTE_INSN_DELETED);
3431 
3432   if (block_stack
3433       && !(block_stack->data.block.cleanups == NULL_TREE
3434 	   && block_stack->data.block.outer_cleanups == NULL_TREE))
3435     thisblock->data.block.outer_cleanups
3436       = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3437 		   block_stack->data.block.outer_cleanups);
3438   else
3439     thisblock->data.block.outer_cleanups = 0;
3440   thisblock->data.block.label_chain = 0;
3441   thisblock->data.block.innermost_stack_block = stack_block_stack;
3442   thisblock->data.block.first_insn = note;
3443   thisblock->data.block.block_start_count = ++current_block_start_count;
3444   thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3445   block_stack = thisblock;
3446   nesting_stack = thisblock;
3447 
3448   /* Make a new level for allocating stack slots.  */
3449   push_temp_slots ();
3450 }
3451 
3452 /* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
3453    to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3454    expand_expr are made.  After we end the region, we know that all
3455    space for all temporaries that were created by TARGET_EXPRs will be
3456    destroyed and their space freed for reuse.  */
3457 
3458 void
3459 expand_start_target_temps ()
3460 {
3461   /* This is so that even if the result is preserved, the space
3462      allocated will be freed, as we know that it is no longer in use.  */
3463   push_temp_slots ();
3464 
3465   /* Start a new binding layer that will keep track of all cleanup
3466      actions to be performed.  */
3467   expand_start_bindings (2);
3468 
3469   target_temp_slot_level = temp_slot_level;
3470 }
3471 
3472 void
3473 expand_end_target_temps ()
3474 {
3475   expand_end_bindings (NULL_TREE, 0, 0);
3476 
3477   /* This is so that even if the result is preserved, the space
3478      allocated will be freed, as we know that it is no longer in use.  */
3479   pop_temp_slots ();
3480 }
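/* Typical pairing of the two functions above (editor's sketch): callers
   bracket the expansion of a full expression so that temporaries made
   for TARGET_EXPRs inside it are cleaned up and their slots freed:

	expand_start_target_temps ();
	expand_expr (exp, const0_rtx, VOIDmode, 0);
	expand_end_target_temps ();  */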
3481 
3482 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3483    in question represents the outermost pair of curly braces (i.e. the "body
3484    block") of a function or method.
3485 
3486    For any BLOCK node representing a "body block" of a function or method, the
3487    BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3488    represents the outermost (function) scope for the function or method (i.e.
3489    the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
3490    *that* node in turn will point to the relevant FUNCTION_DECL node.  */
3491 
3492 int
3493 is_body_block (stmt)
3494      tree stmt;
3495 {
3496   if (TREE_CODE (stmt) == BLOCK)
3497     {
3498       tree parent = BLOCK_SUPERCONTEXT (stmt);
3499 
3500       if (parent && TREE_CODE (parent) == BLOCK)
3501 	{
3502 	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
3503 
3504 	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3505 	    return 1;
3506 	}
3507     }
3508 
3509   return 0;
3510 }
3511 
3512 /* True if we are currently emitting insns in an area of output code
3513    that is controlled by a conditional expression.  This is used by
3514    the cleanup handling code to generate conditional cleanup actions.  */
3515 
3516 int
3517 conditional_context ()
3518 {
3519   return block_stack && block_stack->data.block.conditional_code;
3520 }
3521 
3522 /* Return an opaque pointer to the current nesting level, so frontend code
3523    can check its own sanity.  */
3524 
3525 struct nesting *
3526 current_nesting_level ()
3527 {
3528   return cfun ? block_stack : 0;
3529 }
3530 
3531 /* Emit a handler label for a nonlocal goto handler.
3532    Also emit code to store the handler label in SLOT before BEFORE_INSN.  */
3533 
3534 static rtx
3535 expand_nl_handler_label (slot, before_insn)
3536      rtx slot, before_insn;
3537 {
3538   rtx insns;
3539   rtx handler_label = gen_label_rtx ();
3540 
3541   /* Don't let cleanup_cfg delete the handler.  */
3542   LABEL_PRESERVE_P (handler_label) = 1;
3543 
3544   start_sequence ();
3545   emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3546   insns = get_insns ();
3547   end_sequence ();
3548   emit_insn_before (insns, before_insn);
3549 
3550   emit_label (handler_label);
3551 
3552   return handler_label;
3553 }
3554 
3555 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3556    handler.  */
3557 static void
3558 expand_nl_goto_receiver ()
3559 {
3560 #ifdef HAVE_nonlocal_goto
3561   if (! HAVE_nonlocal_goto)
3562 #endif
3563     /* First adjust our frame pointer to its actual value.  It was
3564        previously set to the start of the virtual area corresponding to
3565        the stacked variables when we branched here and now needs to be
3566        adjusted to the actual hardware fp value.
3567 
3568        Assignments to virtual registers are converted by
3569        instantiate_virtual_regs into the corresponding assignment
3570        to the underlying register (fp in this case) that makes
3571        the original assignment true.
3572        So the following insn will actually be
3573        decrementing fp by STARTING_FRAME_OFFSET.  */
3574     emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3575 
3576 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3577   if (fixed_regs[ARG_POINTER_REGNUM])
3578     {
3579 #ifdef ELIMINABLE_REGS
3580       /* If the argument pointer can be eliminated in favor of the
3581 	 frame pointer, we don't need to restore it.  We assume here
3582 	 that if such an elimination is present, it can always be used.
3583 	 This is the case on all known machines; if we don't make this
3584 	 assumption, we do unnecessary saving on many machines.  */
3585       static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3586       size_t i;
3587 
3588       for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3589 	if (elim_regs[i].from == ARG_POINTER_REGNUM
3590 	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3591 	  break;
3592 
3593       if (i == ARRAY_SIZE (elim_regs))
3594 #endif
3595 	{
3596 	  /* Now restore our arg pointer from the address at which it
3597 	     was saved in our stack frame.  */
3598 	  emit_move_insn (virtual_incoming_args_rtx,
3599 			  copy_to_reg (get_arg_pointer_save_area (cfun)));
3600 	}
3601     }
3602 #endif
3603 
3604 #ifdef HAVE_nonlocal_goto_receiver
3605   if (HAVE_nonlocal_goto_receiver)
3606     emit_insn (gen_nonlocal_goto_receiver ());
3607 #endif
3608 }
3609 
3610 /* Make handlers for nonlocal gotos taking place in the function calls in
3611    block THISBLOCK.  */
3612 
3613 static void
3614 expand_nl_goto_receivers (thisblock)
3615      struct nesting *thisblock;
3616 {
3617   tree link;
3618   rtx afterward = gen_label_rtx ();
3619   rtx insns, slot;
3620   rtx label_list;
3621   int any_invalid;
3622 
3623   /* Record the handler address in the stack slot for that purpose,
3624      during this block, saving and restoring the outer value.  */
3625   if (thisblock->next != 0)
3626     for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3627       {
3628 	rtx save_receiver = gen_reg_rtx (Pmode);
3629 	emit_move_insn (XEXP (slot, 0), save_receiver);
3630 
3631 	start_sequence ();
3632 	emit_move_insn (save_receiver, XEXP (slot, 0));
3633 	insns = get_insns ();
3634 	end_sequence ();
3635 	emit_insn_before (insns, thisblock->data.block.first_insn);
3636       }
3637 
3638   /* Jump around the handlers; they run only when specially invoked.  */
3639   emit_jump (afterward);
3640 
3641   /* Make a separate handler for each label.  */
3642   link = nonlocal_labels;
3643   slot = nonlocal_goto_handler_slots;
3644   label_list = NULL_RTX;
3645   for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3646     /* Skip any labels we shouldn't be able to jump to from here;
3647        we generate one special handler for all of them below, which
3648        just traps.  */
3649     if (! DECL_TOO_LATE (TREE_VALUE (link)))
3650       {
3651 	rtx lab;
3652 	lab = expand_nl_handler_label (XEXP (slot, 0),
3653 				       thisblock->data.block.first_insn);
3654 	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3655 
3656 	expand_nl_goto_receiver ();
3657 
3658 	/* Jump to the "real" nonlocal label.  */
3659 	expand_goto (TREE_VALUE (link));
3660       }
3661 
3662   /* A second pass over all nonlocal labels; this time we handle those
3663      we should not be able to jump to at this point.  */
3664   link = nonlocal_labels;
3665   slot = nonlocal_goto_handler_slots;
3666   any_invalid = 0;
3667   for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3668     if (DECL_TOO_LATE (TREE_VALUE (link)))
3669       {
3670 	rtx lab;
3671 	lab = expand_nl_handler_label (XEXP (slot, 0),
3672 				       thisblock->data.block.first_insn);
3673 	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3674 	any_invalid = 1;
3675       }
3676 
3677   if (any_invalid)
3678     {
3679       expand_nl_goto_receiver ();
3680       expand_builtin_trap ();
3681     }
3682 
3683   nonlocal_goto_handler_labels = label_list;
3684   emit_label (afterward);
3685 }
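/* Editor's sketch (GNU C) of a construct that gives rise to the
   handlers built above: a nested function jumping to a label declared
   with __label__ in its containing function.

	void
	outer (void)
	{
	  __label__ done;
	  void inner (void) { goto done; }
	  inner ();
	done:;
	}

   The nonlocal jump lands on the handler emitted here, which restores
   the frame (and possibly argument) pointer before jumping on to the
   real label.  */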
3686 
3687 /* Warn about any unused VARS (which may contain nodes other than
3688    VAR_DECLs, but such nodes are ignored).  The nodes are connected
3689    via the TREE_CHAIN field.  */
3690 
3691 void
3692 warn_about_unused_variables (vars)
3693      tree vars;
3694 {
3695   tree decl;
3696 
3697   if (warn_unused_variable)
3698     for (decl = vars; decl; decl = TREE_CHAIN (decl))
3699       if (TREE_CODE (decl) == VAR_DECL
3700 	  && ! TREE_USED (decl)
3701 	  && ! DECL_IN_SYSTEM_HEADER (decl)
3702 	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3703 	warning_with_decl (decl, "unused variable `%s'");
3704 }
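/* For example (editor's note), with -Wunused-variable the definition

	void f (void) { int x; }

   draws "unused variable `x'", while artificial decls and decls from
   system headers are skipped, as the conditions above show.  */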
3705 
3706 /* Generate RTL code to terminate a binding contour.
3707 
3708    VARS is the chain of VAR_DECL nodes for the variables bound in this
3709    contour.  There may actually be other nodes in this chain, but any
3710    nodes other than VAR_DECLS are ignored.
3711 
3712    MARK_ENDS is nonzero if we should put a note at the beginning
3713    and end of this binding contour.
3714 
3715    DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3716    (That is true automatically if the contour has a saved stack level.)  */
3717 
3718 void
3719 expand_end_bindings (vars, mark_ends, dont_jump_in)
3720      tree vars;
3721      int mark_ends;
3722      int dont_jump_in;
3723 {
3724   struct nesting *thisblock = block_stack;
3725 
3726   /* If any of the variables in this scope were not used, warn the
3727      user.  */
3728   warn_about_unused_variables (vars);
3729 
3730   if (thisblock->exit_label)
3731     {
3732       do_pending_stack_adjust ();
3733       emit_label (thisblock->exit_label);
3734     }
3735 
3736   /* If necessary, make handlers for nonlocal gotos taking
3737      place in the function calls in this block.  */
3738   if (function_call_count != thisblock->data.block.n_function_calls
3739       && nonlocal_labels
3740       /* Make handler for outermost block
3741 	 if there were any nonlocal gotos to this function.  */
3742       && (thisblock->next == 0 ? current_function_has_nonlocal_label
3743 	  /* Make handler for inner block if it has something
3744 	     special to do when you jump out of it.  */
3745 	  : (thisblock->data.block.cleanups != 0
3746 	     || thisblock->data.block.stack_level != 0)))
3747     expand_nl_goto_receivers (thisblock);
3748 
3749   /* Don't allow jumping into a block that has a stack level.
3750      Cleanups are allowed, though.  */
3751   if (dont_jump_in
3752       || thisblock->data.block.stack_level != 0)
3753     {
3754       struct label_chain *chain;
3755 
3756       /* Any labels in this block are no longer valid to go to.
3757 	 Mark them to cause an error message.  */
3758       for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3759 	{
3760 	  DECL_TOO_LATE (chain->label) = 1;
3761 	  /* If any goto without a fixup came to this label,
3762 	     that must be an error, because gotos without fixups
3763 	     come from outside all saved stack-levels.  */
3764 	  if (TREE_ADDRESSABLE (chain->label))
3765 	    error_with_decl (chain->label,
3766 			     "label `%s' used before containing binding contour");
3767 	}
3768     }
3769 
3770   /* Restore stack level in effect before the block
3771      (only if variable-size objects allocated).  */
3772   /* Perform any cleanups associated with the block.  */
3773 
3774   if (thisblock->data.block.stack_level != 0
3775       || thisblock->data.block.cleanups != 0)
3776     {
3777       int reachable;
3778       rtx insn;
3779 
3780       /* Don't let cleanups affect ({...}) constructs.  */
3781       int old_expr_stmts_for_value = expr_stmts_for_value;
3782       rtx old_last_expr_value = last_expr_value;
3783       tree old_last_expr_type = last_expr_type;
3784       expr_stmts_for_value = 0;
3785 
3786       /* Only clean up here if this point can actually be reached.  */
3787       insn = get_last_insn ();
3788       if (GET_CODE (insn) == NOTE)
3789 	insn = prev_nonnote_insn (insn);
3790       reachable = (! insn || GET_CODE (insn) != BARRIER);
3791 
3792       /* Do the cleanups.  */
3793       expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3794       if (reachable)
3795 	do_pending_stack_adjust ();
3796 
3797       expr_stmts_for_value = old_expr_stmts_for_value;
3798       last_expr_value = old_last_expr_value;
3799       last_expr_type = old_last_expr_type;
3800 
3801       /* Restore the stack level.  */
3802 
3803       if (reachable && thisblock->data.block.stack_level != 0)
3804 	{
3805 	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3806 			      thisblock->data.block.stack_level, NULL_RTX);
3807 	  if (nonlocal_goto_handler_slots != 0)
3808 	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3809 			     NULL_RTX);
3810 	}
3811 
3812       /* Any gotos out of this block must also do these things.
3813 	 Also report any gotos with fixups that came to labels in this
3814 	 level.  */
3815       fixup_gotos (thisblock,
3816 		   thisblock->data.block.stack_level,
3817 		   thisblock->data.block.cleanups,
3818 		   thisblock->data.block.first_insn,
3819 		   dont_jump_in);
3820     }
3821 
3822   /* Mark the beginning and end of the scope if requested.
3823      We do this now, after running cleanups on the variables
3824      just going out of scope, so they are in scope for their cleanups.  */
3825 
3826   if (mark_ends)
3827     {
3828       rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3829       NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3830     }
3831   else
3832     /* Get rid of the beginning-mark if we don't make an end-mark.  */
3833     NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3834 
3835   /* Restore the temporary level of TARGET_EXPRs.  */
3836   target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3837 
3838   /* Restore block_stack level for containing block.  */
3839 
3840   stack_block_stack = thisblock->data.block.innermost_stack_block;
3841   POPSTACK (block_stack);
3842 
3843   /* Pop the stack slot nesting and free any slots at this level.  */
3844   pop_temp_slots ();
3845 }
3846 
3847 /* Generate code to save the stack pointer at the start of the current block
3848    and set up to restore it on exit.  */
3849 
3850 void
3851 save_stack_pointer ()
3852 {
3853   struct nesting *thisblock = block_stack;
3854 
3855   if (thisblock->data.block.stack_level == 0)
3856     {
3857       emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3858 		       &thisblock->data.block.stack_level,
3859 		       thisblock->data.block.first_insn);
3860       stack_block_stack = thisblock;
3861     }
3862 }
3863 
3864 /* Generate RTL for the automatic variable declaration DECL.
3865    (Other kinds of declarations are simply ignored if seen here.)  */
3866 
3867 void
3868 expand_decl (decl)
3869      tree decl;
3870 {
3871   struct nesting *thisblock;
3872   tree type;
3873 
3874   type = TREE_TYPE (decl);
3875 
3876   /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3877      type in case this node is used in a reference.  */
3878   if (TREE_CODE (decl) == CONST_DECL)
3879     {
3880       DECL_MODE (decl) = TYPE_MODE (type);
3881       DECL_ALIGN (decl) = TYPE_ALIGN (type);
3882       DECL_SIZE (decl) = TYPE_SIZE (type);
3883       DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3884       return;
3885     }
3886 
3887   /* Otherwise, only automatic variables need any expansion done.  Static and
3888      external variables, and external functions, will be handled by
3889      `assemble_variable' (called from finish_decl).  TYPE_DECL requires
3890      nothing.  PARM_DECLs are handled in `assign_parms'.  */
3891   if (TREE_CODE (decl) != VAR_DECL)
3892     return;
3893 
3894   if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3895     return;
3896 
3897   thisblock = block_stack;
3898 
3899   /* Create the RTL representation for the variable.  */
3900 
3901   if (type == error_mark_node)
3902     SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3903 
3904   else if (DECL_SIZE (decl) == 0)
3905     /* Variable with incomplete type.  */
3906     {
3907       rtx x;
3908       if (DECL_INITIAL (decl) == 0)
3909 	/* Error message was already done; now avoid a crash.  */
3910 	x = gen_rtx_MEM (BLKmode, const0_rtx);
3911       else
3912 	/* An initializer is going to decide the size of this array.
3913 	   Until we know the size, represent its address with a reg.  */
3914 	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3915 
3916       set_mem_attributes (x, decl, 1);
3917       SET_DECL_RTL (decl, x);
3918     }
3919   else if (DECL_MODE (decl) != BLKmode
3920 	   /* If -ffloat-store, don't put explicit float vars
3921 	      into regs.  */
3922 	   && !(flag_float_store
3923 		&& TREE_CODE (type) == REAL_TYPE)
3924 	   && ! TREE_THIS_VOLATILE (decl)
3925 	   && ! DECL_NONLOCAL (decl)
3926 	   && (DECL_REGISTER (decl) || optimize))
3927     {
3928       /* Automatic variable that can go in a register.  */
3929       int unsignedp = TREE_UNSIGNED (type);
3930       enum machine_mode reg_mode
3931 	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3932 
3933       SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3934 
3935       if (GET_CODE (DECL_RTL (decl)) == REG)
3936 	REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3937       else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3938 	{
3939 	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3940 	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3941 	}
3942 
3943       mark_user_reg (DECL_RTL (decl));
3944 
3945       if (POINTER_TYPE_P (type))
3946 	mark_reg_pointer (DECL_RTL (decl),
3947 			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3948 
3949       maybe_set_unchanging (DECL_RTL (decl), decl);
3950 
3951       /* If something wants our address, try to use ADDRESSOF.  */
3952       if (TREE_ADDRESSABLE (decl))
3953 	put_var_into_stack (decl, /*rescan=*/false);
3954     }
3955 
3956   else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3957 	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3958 		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3959 					  STACK_CHECK_MAX_VAR_SIZE)))
3960     {
3961       /* Variable of fixed size that goes on the stack.  */
3962       rtx oldaddr = 0;
3963       rtx addr;
3964       rtx x;
3965 
3966       /* If we previously made RTL for this decl, it must be an array
3967 	 whose size was determined by the initializer.
3968 	 The old address was a register; set that register now
3969 	 to the proper address.  */
3970       if (DECL_RTL_SET_P (decl))
3971 	{
3972 	  if (GET_CODE (DECL_RTL (decl)) != MEM
3973 	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3974 	    abort ();
3975 	  oldaddr = XEXP (DECL_RTL (decl), 0);
3976 	}
3977 
3978       /* Set alignment we actually gave this decl.  */
3979       DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3980 			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
3981       DECL_USER_ALIGN (decl) = 0;
3982 
3983       x = assign_temp (decl, 1, 1, 1);
3984       set_mem_attributes (x, decl, 1);
3985       SET_DECL_RTL (decl, x);
3986 
3987       if (oldaddr)
3988 	{
3989 	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3990 	  if (addr != oldaddr)
3991 	    emit_move_insn (oldaddr, addr);
3992 	}
3993     }
3994   else
3995     /* Dynamic-size object: must push space on the stack.  */
3996     {
3997       rtx address, size, x;
3998 
3999       if (warn_variable_decl)
4000 	warning ("variable-sized declaration");
4001 
4002       /* Record the stack pointer on entry to the block, if we have
4003 	 not already done so.  */
4004       do_pending_stack_adjust ();
4005       save_stack_pointer ();
4006 
4007       /* In function-at-a-time mode, variable_size doesn't expand this,
4008 	 so do it now.  */
4009       if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4010 	expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4011 		     const0_rtx, VOIDmode, 0);
4012 
4013       /* Compute the variable's size, in bytes.  */
4014       size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4015       free_temp_slots ();
4016 
4017       /* Allocate space on the stack for the variable.  Note that
4018 	 DECL_ALIGN says how the variable is to be aligned and we
4019 	 cannot use it to conclude anything about the alignment of
4020 	 the size.  */
4021       address = allocate_dynamic_stack_space (size, NULL_RTX,
4022 					      TYPE_ALIGN (TREE_TYPE (decl)));
4023 
4024       /* Reference the variable indirect through that rtx.  */
4025       x = gen_rtx_MEM (DECL_MODE (decl), address);
4026       set_mem_attributes (x, decl, 1);
4027       SET_DECL_RTL (decl, x);
4028 
4029 
4030       /* Indicate the alignment we actually gave this variable.  */
4031 #ifdef STACK_BOUNDARY
4032       DECL_ALIGN (decl) = STACK_BOUNDARY;
4033 #else
4034       DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4035 #endif
4036       DECL_USER_ALIGN (decl) = 0;
4037     }
4038 }
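/* The final, dynamic-size arm above is what expands a variable-length
   array (editor's sketch):

	void g (int n)
	{
	  char buf[n];
	}

   Here the stack pointer is saved on entry to the block, the size of
   buf is computed at run time, and allocate_dynamic_stack_space pushes
   the space, yielding the address that the MEM for buf refers to.  */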
4039 
4040 /* Emit code to perform the initialization of a declaration DECL.  */
4041 
4042 void
4043 expand_decl_init (decl)
4044      tree decl;
4045 {
4046   int was_used = TREE_USED (decl);
4047 
4048   /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
4049      for static decls.  */
4050   if (TREE_CODE (decl) == CONST_DECL
4051       || TREE_STATIC (decl))
4052     return;
4053 
4054   /* Compute and store the initial value now.  */
4055 
4056   push_temp_slots ();
4057 
4058   if (DECL_INITIAL (decl) == error_mark_node)
4059     {
4060       enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4061 
4062       if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4063 	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
4064 	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4065 			   0, 0);
4066       emit_queue ();
4067     }
4068   else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4069     {
4070       emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4071       expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4072       emit_queue ();
4073     }
4074 
4075   /* Don't let the initialization count as "using" the variable.  */
4076   TREE_USED (decl) = was_used;
4077 
4078   /* Free any temporaries we made while initializing the decl.  */
4079   preserve_temp_slots (NULL_RTX);
4080   free_temp_slots ();
4081   pop_temp_slots ();
4082 }
4083 
4084 /* CLEANUP is an expression to be executed at exit from this binding contour;
4085    for example, in C++, it might call the destructor for this variable.
4086 
4087    We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4088    CLEANUP multiple times, and have the correct semantics.  This
4089    happens in exception handling and for gotos, returns, and breaks
4090    that leave the current scope.
4091 
4092    If CLEANUP is nonzero and DECL is zero, we record a cleanup
4093    that is not associated with any particular variable.  */
4094 
4095 int
4096 expand_decl_cleanup (decl, cleanup)
4097      tree decl, cleanup;
4098 {
4099   struct nesting *thisblock;
4100 
4101   /* Error if we are not in any block.  */
4102   if (cfun == 0 || block_stack == 0)
4103     return 0;
4104 
4105   thisblock = block_stack;
4106 
4107   /* Record the cleanup if there is one.  */
4108 
4109   if (cleanup != 0)
4110     {
4111       tree t;
4112       rtx seq;
4113       tree *cleanups = &thisblock->data.block.cleanups;
4114       int cond_context = conditional_context ();
4115 
4116       if (cond_context)
4117 	{
4118 	  rtx flag = gen_reg_rtx (word_mode);
4119 	  rtx set_flag_0;
4120 	  tree cond;
4121 
4122 	  start_sequence ();
4123 	  emit_move_insn (flag, const0_rtx);
4124 	  set_flag_0 = get_insns ();
4125 	  end_sequence ();
4126 
4127 	  thisblock->data.block.last_unconditional_cleanup
4128 	    = emit_insn_after (set_flag_0,
4129 				thisblock->data.block.last_unconditional_cleanup);
4130 
4131 	  emit_move_insn (flag, const1_rtx);
4132 
4133 	  cond = build_decl (VAR_DECL, NULL_TREE,
4134 			     (*lang_hooks.types.type_for_mode) (word_mode, 1));
4135 	  SET_DECL_RTL (cond, flag);
4136 
4137 	  /* Conditionalize the cleanup.  */
4138 	  cleanup = build (COND_EXPR, void_type_node,
4139 			   (*lang_hooks.truthvalue_conversion) (cond),
4140 			   cleanup, integer_zero_node);
4141 	  cleanup = fold (cleanup);
4142 
4143 	  cleanups = &thisblock->data.block.cleanups;
4144 	}
4145 
4146       cleanup = unsave_expr (cleanup);
4147 
4148       t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4149 
4150       if (! cond_context)
4151 	/* If this block has a cleanup, it belongs in stack_block_stack.  */
4152 	stack_block_stack = thisblock;
4153 
4154       if (cond_context)
4155 	{
4156 	  start_sequence ();
4157 	}
4158 
4159       if (! using_eh_for_cleanups_p)
4160 	TREE_ADDRESSABLE (t) = 1;
4161       else
4162 	expand_eh_region_start ();
4163 
4164       if (cond_context)
4165 	{
4166 	  seq = get_insns ();
4167 	  end_sequence ();
4168 	  if (seq)
4169 	    thisblock->data.block.last_unconditional_cleanup
4170 	      = emit_insn_after (seq,
4171 				 thisblock->data.block.last_unconditional_cleanup);
4172 	}
4173       else
4174 	{
4175 	  thisblock->data.block.last_unconditional_cleanup
4176 	    = get_last_insn ();
4177 	  /* When we insert instructions after the last unconditional cleanup,
4178 	     we don't adjust last_insn.  That means that a later add_insn will
4179 	     clobber the instructions we've just added.  The easiest way to
4180 	     fix this is to just insert another instruction here, so that the
4181 	     instructions inserted after the last unconditional cleanup are
4182 	     never the last instruction.  */
4183 	  emit_note (NULL, NOTE_INSN_DELETED);
4184 	}
4185     }
4186   return 1;
4187 }
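/* Editor's sketch of the conditional-cleanup case above: for C++ code
   such as

	use (cond ? T () : U ());

   the destructor of the T temporary must run only when COND was true.
   FLAG is cleared at the last unconditional point, set to 1 where the
   temporary is actually constructed, and the recorded cleanup is
   wrapped in a COND_EXPR so that it behaves like

	if (flag) run the destructor;  */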
4188 
4189 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4190    is thrown.  */
4191 
4192 int
4193 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4194      tree decl, cleanup;
4195      int eh_only;
4196 {
4197   int ret = expand_decl_cleanup (decl, cleanup);
4198   if (cleanup && ret)
4199     {
4200       tree node = block_stack->data.block.cleanups;
4201       CLEANUP_EH_ONLY (node) = eh_only;
4202     }
4203   return ret;
4204 }
4205 
4206 /* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
4207    DECL_ELTS is the list of elements that belong to DECL's type.
4208    In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
4209 
4210 void
4211 expand_anon_union_decl (decl, cleanup, decl_elts)
4212      tree decl, cleanup, decl_elts;
4213 {
4214   struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4215   rtx x;
4216   tree t;
4217 
4218   /* If any of the elements are addressable, so is the entire union.  */
4219   for (t = decl_elts; t; t = TREE_CHAIN (t))
4220     if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4221       {
4222 	TREE_ADDRESSABLE (decl) = 1;
4223 	break;
4224       }
4225 
4226   expand_decl (decl);
4227   expand_decl_cleanup (decl, cleanup);
4228   x = DECL_RTL (decl);
4229 
4230   /* Go through the elements, assigning RTL to each.  */
4231   for (t = decl_elts; t; t = TREE_CHAIN (t))
4232     {
4233       tree decl_elt = TREE_VALUE (t);
4234       tree cleanup_elt = TREE_PURPOSE (t);
4235       enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4236 
4237       /* If any of the elements are used, so is the entire
4238 	 union.  */
4239       if (TREE_USED (decl_elt))
4240 	TREE_USED (decl) = 1;
4241 
4242       /* Propagate the union's alignment to the elements.  */
4243       DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4244       DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4245 
4246       /* If the element has BLKmode and the union doesn't, the union is
4247          aligned such that the element doesn't need to have BLKmode, so
4248          change the element's mode to the appropriate one for its size.  */
4249       if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4250 	DECL_MODE (decl_elt) = mode
4251 	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4252 
4253       /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4254          instead create a new MEM rtx with the proper mode.  */
4255       if (GET_CODE (x) == MEM)
4256 	{
4257 	  if (mode == GET_MODE (x))
4258 	    SET_DECL_RTL (decl_elt, x);
4259 	  else
4260 	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4261 	}
4262       else if (GET_CODE (x) == REG)
4263 	{
4264 	  if (mode == GET_MODE (x))
4265 	    SET_DECL_RTL (decl_elt, x);
4266 	  else
4267 	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4268 	}
4269       else
4270 	abort ();
4271 
4272       /* Record the cleanup if there is one.  */
4273 
4274       if (cleanup != 0)
4275 	thisblock->data.block.cleanups
4276 	  = tree_cons (decl_elt, cleanup_elt,
4277 		       thisblock->data.block.cleanups);
4278     }
4279 }
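/* Editor's sketch of the construct handled above (C++, and GNU C as an
   extension): a local anonymous union,

	void f (void)
	{
	  union { int i; float f; };
	  i = 3;
	}

   Each member becomes a VAR_DECL in DECL_ELTS sharing the union's
   storage, so each element's RTL aliases DECL's MEM or REG with an
   adjusted mode.  */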
4280 
4281 /* Expand a list of cleanups LIST.
4282    Elements may be expressions or may be nested lists.
4283 
4284    If DONT_DO is nonnull, then any list-element
4285    whose TREE_PURPOSE matches DONT_DO is omitted.
4286    This is sometimes used to avoid a cleanup associated with
4287    a value that is being returned out of the scope.
4288 
4289    If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4290    goto and handle protection regions specially in that case.
4291 
4292    If REACHABLE, we emit code, otherwise just inform the exception handling
4293    code about this finalization.  */
4294 
4295 static void
4296 expand_cleanups (list, dont_do, in_fixup, reachable)
4297      tree list;
4298      tree dont_do;
4299      int in_fixup;
4300      int reachable;
4301 {
4302   tree tail;
4303   for (tail = list; tail; tail = TREE_CHAIN (tail))
4304     if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4305       {
4306 	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4307 	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4308 	else
4309 	  {
4310 	    if (! in_fixup && using_eh_for_cleanups_p)
4311 	      expand_eh_region_end_cleanup (TREE_VALUE (tail));
4312 
4313 	    if (reachable && !CLEANUP_EH_ONLY (tail))
4314 	      {
4315 		/* Cleanups may be run multiple times.  For example,
4316 		   when exiting a binding contour, we expand the
4317 		   cleanups associated with that contour.  When a goto
4318 		   within that binding contour has a target outside that
4319 		   contour, it will expand all cleanups from its scope to
4320 		   the target.  Though the cleanups are expanded multiple
4321 		   times, the control paths are non-overlapping so the
4322 		   cleanups will not be executed twice.  */
4323 
4324 		/* We may need to protect from outer cleanups.  */
4325 		if (in_fixup && using_eh_for_cleanups_p)
4326 		  {
4327 		    expand_eh_region_start ();
4328 
4329 		    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4330 
4331 		    expand_eh_region_end_fixup (TREE_VALUE (tail));
4332 		  }
4333 		else
4334 		  expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4335 
4336 		free_temp_slots ();
4337 	      }
4338 	  }
4339       }
4340 }
4341 
4342 /* Mark the context we are emitting RTL for as a conditional
4343    context, so that any cleanup actions we register with
4344    expand_decl_cleanup will be properly conditionalized when those
4345    cleanup actions are later performed.  Must be called before any
4346    expression (tree) is expanded that is within a conditional context.  */
4347 
4348 void
4349 start_cleanup_deferral ()
4350 {
4351   /* block_stack can be NULL if we are inside the parameter list.  It is
4352      OK to do nothing, because cleanups aren't possible here.  */
4353   if (block_stack)
4354     ++block_stack->data.block.conditional_code;
4355 }
4356 
4357 /* Mark the end of a conditional region of code.  Because cleanup
4358    deferrals may be nested, we may still be in a conditional region
4359    after we end the currently deferred cleanups; only after we end all
4360    deferred cleanups are we back in unconditional code.  */
4361 
4362 void
4363 end_cleanup_deferral ()
4364 {
4365   /* block_stack can be NULL if we are inside the parameter list.  It is
4366      OK to do nothing, because cleanups aren't possible here.  */
4367   if (block_stack)
4368     --block_stack->data.block.conditional_code;
4369 }
4370 
4371 /* Move all cleanups from the current block_stack
4372    to the containing block_stack, where they are assumed to
4373    have been created.  If anything can cause a temporary to
4374    be created, but not expanded for more than one level of
4375    block_stacks, then this code will have to change.  */
4376 
4377 void
4378 move_cleanups_up ()
4379 {
4380   struct nesting *block = block_stack;
4381   struct nesting *outer = block->next;
4382 
4383   outer->data.block.cleanups
4384     = chainon (block->data.block.cleanups,
4385 	       outer->data.block.cleanups);
4386   block->data.block.cleanups = 0;
4387 }
4388 
4389 tree
4390 last_cleanup_this_contour ()
4391 {
4392   if (block_stack == 0)
4393     return 0;
4394 
4395   return block_stack->data.block.cleanups;
4396 }
4397 
4398 /* Return 1 if there are any pending cleanups at this point.
4399    If THIS_CONTOUR is nonzero, check the current contour as well.
4400    Otherwise, look only at the contours that enclose this one.  */
4401 
4402 int
4403 any_pending_cleanups (this_contour)
4404      int this_contour;
4405 {
4406   struct nesting *block;
4407 
4408   if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4409     return 0;
4410 
4411   if (this_contour && block_stack->data.block.cleanups != NULL)
4412     return 1;
4413   if (block_stack->data.block.cleanups == 0
4414       && block_stack->data.block.outer_cleanups == 0)
4415     return 0;
4416 
4417   for (block = block_stack->next; block; block = block->next)
4418     if (block->data.block.cleanups != 0)
4419       return 1;
4420 
4421   return 0;
4422 }
4423 
4424 /* Enter a case (Pascal) or switch (C) statement.
4425    Push a block onto case_stack and nesting_stack
4426    to accumulate the case-labels that are seen
4427    and to record the labels generated for the statement.
4428 
4429    EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4430    Otherwise, this construct is transparent for `exit_something'.
4431 
4432    EXPR is the index-expression to be dispatched on.
4433    TYPE is its nominal type.  We could simply convert EXPR to this type,
4434    but instead we take short cuts.  */
4435 
4436 void
4437 expand_start_case (exit_flag, expr, type, printname)
4438      int exit_flag;
4439      tree expr;
4440      tree type;
4441      const char *printname;
4442 {
4443   struct nesting *thiscase = ALLOC_NESTING ();
4444 
4445   /* Make an entry on case_stack for the case we are entering.  */
4446 
4447   thiscase->desc = CASE_NESTING;
4448   thiscase->next = case_stack;
4449   thiscase->all = nesting_stack;
4450   thiscase->depth = ++nesting_depth;
4451   thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4452   thiscase->data.case_stmt.case_list = 0;
4453   thiscase->data.case_stmt.index_expr = expr;
4454   thiscase->data.case_stmt.nominal_type = type;
4455   thiscase->data.case_stmt.default_label = 0;
4456   thiscase->data.case_stmt.printname = printname;
4457   thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4458   case_stack = thiscase;
4459   nesting_stack = thiscase;
4460 
4461   do_pending_stack_adjust ();
4462   emit_queue ();
4463 
4464   /* Make sure case_stmt.start points to something that won't
4465      need any transformation before expand_end_case.  */
4466   if (GET_CODE (get_last_insn ()) != NOTE)
4467     emit_note (NULL, NOTE_INSN_DELETED);
4468 
4469   thiscase->data.case_stmt.start = get_last_insn ();
4470 
4471   start_cleanup_deferral ();
4472 }
4473 
4474 /* Start a "dummy case statement" within which case labels are invalid
4475    and are not connected to any larger real case statement.
4476    This can be used if you don't want to let a case statement jump
4477    into the middle of certain kinds of constructs.  */
4478 
4479 void
4480 expand_start_case_dummy ()
4481 {
4482   struct nesting *thiscase = ALLOC_NESTING ();
4483 
4484   /* Make an entry on case_stack for the dummy.  */
4485 
4486   thiscase->desc = CASE_NESTING;
4487   thiscase->next = case_stack;
4488   thiscase->all = nesting_stack;
4489   thiscase->depth = ++nesting_depth;
4490   thiscase->exit_label = 0;
4491   thiscase->data.case_stmt.case_list = 0;
4492   thiscase->data.case_stmt.start = 0;
4493   thiscase->data.case_stmt.nominal_type = 0;
4494   thiscase->data.case_stmt.default_label = 0;
4495   case_stack = thiscase;
4496   nesting_stack = thiscase;
4497   start_cleanup_deferral ();
4498 }
4499 
4500 /* End a dummy case statement.  */
4501 
4502 void
4503 expand_end_case_dummy ()
4504 {
4505   end_cleanup_deferral ();
4506   POPSTACK (case_stack);
4507 }
4508 
4509 /* Return the data type of the index-expression
4510    of the innermost case statement, or null if none.  */
4511 
4512 tree
4513 case_index_expr_type ()
4514 {
4515   if (case_stack)
4516     return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4517   return 0;
4518 }
4519 
4520 static void
4521 check_seenlabel ()
4522 {
4523   /* If this is the first label, warn if any insns have been emitted.  */
4524   if (case_stack->data.case_stmt.line_number_status >= 0)
4525     {
4526       rtx insn;
4527 
4528       restore_line_number_status
4529 	(case_stack->data.case_stmt.line_number_status);
4530       case_stack->data.case_stmt.line_number_status = -1;
4531 
4532       for (insn = case_stack->data.case_stmt.start;
4533 	   insn;
4534 	   insn = NEXT_INSN (insn))
4535 	{
4536 	  if (GET_CODE (insn) == CODE_LABEL)
4537 	    break;
4538 	  if (GET_CODE (insn) != NOTE
4539 	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4540 	    {
4541 	      do
4542 		insn = PREV_INSN (insn);
4543 	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4544 
4545 	      /* If insn is zero, then there must have been a syntax error.  */
4546 	      if (insn)
4547 		warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4548 					    NOTE_LINE_NUMBER (insn),
4549 					    "unreachable code at beginning of %s",
4550 					    case_stack->data.case_stmt.printname);
4551 	      break;
4552 	    }
4553 	}
4554     }
4555 }
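/* For example (editor's note): a statement emitted between the head of
   a switch and its first case label, as in

	switch (x)
	  {
	    f ();
	  case 1:
	    break;
	  }

   triggers the "unreachable code at beginning of %s" warning above.  */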
4556 
4557 /* Accumulate one case or default label inside a case or switch statement.
4558    VALUE is the value of the case (a null pointer, for a default label).
4559    The function CONVERTER, when applied to arguments T and V,
4560    converts the value V to the type T.
4561 
4562    If not currently inside a case or switch statement, return 1 and do
4563    nothing.  The caller will print a language-specific error message.
4564    If VALUE is a duplicate or overlaps, return 2 and do nothing
4565    except store the (first) duplicate node in *DUPLICATE.
4566    If VALUE is out of range, return 3 and do nothing.
4567    If we are jumping into the scope of a cleanup or var-sized array, return 5.
4568    Return 0 on success.
4569 
4570    Extended to handle range statements.  */
4571 
4572 int
4573 pushcase (value, converter, label, duplicate)
4574      tree value;
4575      tree (*converter) PARAMS ((tree, tree));
4576      tree label;
4577      tree *duplicate;
4578 {
4579   tree index_type;
4580   tree nominal_type;
4581 
4582   /* Fail if not inside a real case statement.  */
4583   if (! (case_stack && case_stack->data.case_stmt.start))
4584     return 1;
4585 
4586   if (stack_block_stack
4587       && stack_block_stack->depth > case_stack->depth)
4588     return 5;
4589 
4590   index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4591   nominal_type = case_stack->data.case_stmt.nominal_type;
4592 
4593   /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4594   if (index_type == error_mark_node)
4595     return 0;
4596 
4597   /* Convert VALUE to the type in which the comparisons are nominally done.  */
4598   if (value != 0)
4599     value = (*converter) (nominal_type, value);
4600 
4601   check_seenlabel ();
4602 
4603   /* Fail if this value is out of range for the actual type of the index
4604      (which may be narrower than NOMINAL_TYPE).  */
4605   if (value != 0
4606       && (TREE_CONSTANT_OVERFLOW (value)
4607 	  || ! int_fits_type_p (value, index_type)))
4608     return 3;
4609 
4610   return add_case_node (value, value, label, duplicate);
4611 }
4612 
4613 /* Like pushcase but this case applies to all values between VALUE1 and
4614    VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
4615    value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
4616    starts at VALUE1 and ends at the highest value of the index type.
4617    If both are NULL, this case applies to all values.
4618 
4619    The return value is the same as that of pushcase but there is one
4620    additional error code: 4 means the specified range was empty.  */
4621 
4622 int
4623 pushcase_range (value1, value2, converter, label, duplicate)
4624      tree value1, value2;
4625      tree (*converter) PARAMS ((tree, tree));
4626      tree label;
4627      tree *duplicate;
4628 {
4629   tree index_type;
4630   tree nominal_type;
4631 
4632   /* Fail if not inside a real case statement.  */
4633   if (! (case_stack && case_stack->data.case_stmt.start))
4634     return 1;
4635 
4636   if (stack_block_stack
4637       && stack_block_stack->depth > case_stack->depth)
4638     return 5;
4639 
4640   index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4641   nominal_type = case_stack->data.case_stmt.nominal_type;
4642 
4643   /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4644   if (index_type == error_mark_node)
4645     return 0;
4646 
4647   check_seenlabel ();
4648 
4649   /* Convert VALUEs to type in which the comparisons are nominally done
4650      and replace any unspecified value with the corresponding bound.  */
4651   if (value1 == 0)
4652     value1 = TYPE_MIN_VALUE (index_type);
4653   if (value2 == 0)
4654     value2 = TYPE_MAX_VALUE (index_type);
4655 
4656   /* Fail if the range is empty.  Do this before any conversion since
4657      we want to allow out-of-range empty ranges.  */
4658   if (value2 != 0 && tree_int_cst_lt (value2, value1))
4659     return 4;
4660 
4661   /* If the max was unbounded, use the max of the nominal_type we are
4662      converting to.  Do this after the < check above to suppress false
4663      positives.  */
4664   if (value2 == 0)
4665     value2 = TYPE_MAX_VALUE (nominal_type);
4666 
4667   value1 = (*converter) (nominal_type, value1);
4668   value2 = (*converter) (nominal_type, value2);
4669 
4670   /* Fail if these values are out of range.  */
4671   if (TREE_CONSTANT_OVERFLOW (value1)
4672       || ! int_fits_type_p (value1, index_type))
4673     return 3;
4674 
4675   if (TREE_CONSTANT_OVERFLOW (value2)
4676       || ! int_fits_type_p (value2, index_type))
4677     return 3;
4678 
4679   return add_case_node (value1, value2, label, duplicate);
4680 }
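
/* Illustration (a sketch, not part of GCC): the GNU C case-range
   extension, which the C front end lowers to pushcase_range calls; an
   omitted bound arrives here as a null tree and is replaced by the
   index type's minimum or maximum, as above.  */
#if 0
static int
is_lower_alpha (c)
     int c;
{
  switch (c)
    {
    case 'a' ... 'z':	/* Becomes one pushcase_range ('a', 'z') node.  */
      return 1;
    default:
      return 0;
    }
}
#endif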
4681 
4682 /* Do the actual insertion of a case label for pushcase and pushcase_range
4683    into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
4684    slowdown for large switch statements.  */
4685 
4686 int
4687 add_case_node (low, high, label, duplicate)
4688      tree low, high;
4689      tree label;
4690      tree *duplicate;
4691 {
4692   struct case_node *p, **q, *r;
4693 
4694   /* If there's no HIGH value, then this is not a case range; it's
4695      just a simple case label.  But that's just a degenerate case
4696      range.  */
4697   if (!high)
4698     high = low;
4699 
4700   /* Handle default labels specially.  */
4701   if (!high && !low)
4702     {
4703       if (case_stack->data.case_stmt.default_label != 0)
4704 	{
4705 	  *duplicate = case_stack->data.case_stmt.default_label;
4706 	  return 2;
4707 	}
4708       case_stack->data.case_stmt.default_label = label;
4709       expand_label (label);
4710       return 0;
4711     }
4712 
4713   q = &case_stack->data.case_stmt.case_list;
4714   p = *q;
4715 
4716   while ((r = *q))
4717     {
4718       p = r;
4719 
4720       /* Keep going past elements distinctly greater than HIGH.  */
4721       if (tree_int_cst_lt (high, p->low))
4722 	q = &p->left;
4723 
4724       /* or distinctly less than LOW.  */
4725       else if (tree_int_cst_lt (p->high, low))
4726 	q = &p->right;
4727 
4728       else
4729 	{
4730 	  /* We have an overlap; this is an error.  */
4731 	  *duplicate = p->code_label;
4732 	  return 2;
4733 	}
4734     }
4735 
4736   /* Add this label to the chain, and succeed.  */
4737 
4738   r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4739   r->low = low;
4740 
4741   /* If the bounds are equal, turn this into the one-value case.  */
4742   if (tree_int_cst_equal (low, high))
4743     r->high = r->low;
4744   else
4745     r->high = high;
4746 
4747   r->code_label = label;
4748   expand_label (label);
4749 
4750   *q = r;
4751   r->parent = p;
4752   r->left = 0;
4753   r->right = 0;
4754   r->balance = 0;
4755 
4756   while (p)
4757     {
4758       struct case_node *s;
4759 
4760       if (r == p->left)
4761 	{
4762 	  int b;
4763 
4764 	  if (! (b = p->balance))
4765 	    /* Growth propagation from left side.  */
4766 	    p->balance = -1;
4767 	  else if (b < 0)
4768 	    {
4769 	      if (r->balance < 0)
4770 		{
4771 		  /* R-Rotation */
4772 		  if ((p->left = s = r->right))
4773 		    s->parent = p;
4774 
4775 		  r->right = p;
4776 		  p->balance = 0;
4777 		  r->balance = 0;
4778 		  s = p->parent;
4779 		  p->parent = r;
4780 
4781 		  if ((r->parent = s))
4782 		    {
4783 		      if (s->left == p)
4784 			s->left = r;
4785 		      else
4786 			s->right = r;
4787 		    }
4788 		  else
4789 		    case_stack->data.case_stmt.case_list = r;
4790 		}
4791 	      else
4792 		/* r->balance == +1 */
4793 		{
4794 		  /* LR-Rotation */
4795 
4796 		  int b2;
4797 		  struct case_node *t = r->right;
4798 
4799 		  if ((p->left = s = t->right))
4800 		    s->parent = p;
4801 
4802 		  t->right = p;
4803 		  if ((r->right = s = t->left))
4804 		    s->parent = r;
4805 
4806 		  t->left = r;
4807 		  b = t->balance;
4808 		  b2 = b < 0;
4809 		  p->balance = b2;
4810 		  b2 = -b2 - b;
4811 		  r->balance = b2;
4812 		  t->balance = 0;
4813 		  s = p->parent;
4814 		  p->parent = t;
4815 		  r->parent = t;
4816 
4817 		  if ((t->parent = s))
4818 		    {
4819 		      if (s->left == p)
4820 			s->left = t;
4821 		      else
4822 			s->right = t;
4823 		    }
4824 		  else
4825 		    case_stack->data.case_stmt.case_list = t;
4826 		}
4827 	      break;
4828 	    }
4829 
4830 	  else
4831 	    {
4832 	      /* p->balance == +1; growth of left side balances the node.  */
4833 	      p->balance = 0;
4834 	      break;
4835 	    }
4836 	}
4837       else
4838 	/* r == p->right */
4839 	{
4840 	  int b;
4841 
4842 	  if (! (b = p->balance))
4843 	    /* Growth propagation from right side.  */
4844 	    p->balance++;
4845 	  else if (b > 0)
4846 	    {
4847 	      if (r->balance > 0)
4848 		{
4849 		  /* L-Rotation */
4850 
4851 		  if ((p->right = s = r->left))
4852 		    s->parent = p;
4853 
4854 		  r->left = p;
4855 		  p->balance = 0;
4856 		  r->balance = 0;
4857 		  s = p->parent;
4858 		  p->parent = r;
4859 		  if ((r->parent = s))
4860 		    {
4861 		      if (s->left == p)
4862 			s->left = r;
4863 		      else
4864 			s->right = r;
4865 		    }
4866 
4867 		  else
4868 		    case_stack->data.case_stmt.case_list = r;
4869 		}
4870 
4871 	      else
4872 		/* r->balance == -1 */
4873 		{
4874 		  /* RL-Rotation */
4875 		  int b2;
4876 		  struct case_node *t = r->left;
4877 
4878 		  if ((p->right = s = t->left))
4879 		    s->parent = p;
4880 
4881 		  t->left = p;
4882 
4883 		  if ((r->left = s = t->right))
4884 		    s->parent = r;
4885 
4886 		  t->right = r;
4887 		  b = t->balance;
4888 		  b2 = b < 0;
4889 		  r->balance = b2;
4890 		  b2 = -b2 - b;
4891 		  p->balance = b2;
4892 		  t->balance = 0;
4893 		  s = p->parent;
4894 		  p->parent = t;
4895 		  r->parent = t;
4896 
4897 		  if ((t->parent = s))
4898 		    {
4899 		      if (s->left == p)
4900 			s->left = t;
4901 		      else
4902 			s->right = t;
4903 		    }
4904 
4905 		  else
4906 		    case_stack->data.case_stmt.case_list = t;
4907 		}
4908 	      break;
4909 	    }
4910 	  else
4911 	    {
4912 	      /* p->balance == -1; growth of right side balances the node.  */
4913 	      p->balance = 0;
4914 	      break;
4915 	    }
4916 	}
4917 
4918       r = p;
4919       p = p->parent;
4920     }
4921 
4922   return 0;
4923 }
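
/* Illustration (a worked trace, not additional code): assume single-value
   cases are added in the order 1, 2, 3.  Inserting 3 makes it the right
   child of 2; the rebalancing loop's first pass sets 2's balance to +1,
   and its second pass finds p = 1 with balance +1 and r = 2 with balance
   +1, so the L-rotation runs, leaving 2 as the root with children 1 and
   3 and every balance zero.  */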
4924 
4925 /* Returns the number of possible values of TYPE.
4926    Returns -1 if the number is unknown, variable, or if the number does not
4927    fit in a HOST_WIDE_INT.
4928    Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4929    do not increase monotonically (there may be duplicates);
4930    to 1 if the values increase monotonically, but not always by 1;
4931    otherwise sets it to 0.  */
4932 
4933 HOST_WIDE_INT
4934 all_cases_count (type, sparseness)
4935      tree type;
4936      int *sparseness;
4937 {
4938   tree t;
4939   HOST_WIDE_INT count, minval, lastval;
4940 
4941   *sparseness = 0;
4942 
4943   switch (TREE_CODE (type))
4944     {
4945     case BOOLEAN_TYPE:
4946       count = 2;
4947       break;
4948 
4949     case CHAR_TYPE:
4950       count = 1 << BITS_PER_UNIT;
4951       break;
4952 
4953     default:
4954     case INTEGER_TYPE:
4955       if (TYPE_MAX_VALUE (type) != 0
4956 	  && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4957 				    TYPE_MIN_VALUE (type))))
4958 	  && 0 != (t = fold (build (PLUS_EXPR, type, t,
4959 				    convert (type, integer_zero_node))))
4960 	  && host_integerp (t, 1))
4961 	count = tree_low_cst (t, 1);
4962       else
4963 	return -1;
4964       break;
4965 
4966     case ENUMERAL_TYPE:
4967       /* Don't waste time with enumeral types with huge values.  */
4968       if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4969 	  || TYPE_MAX_VALUE (type) == 0
4970 	  || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4971 	return -1;
4972 
4973       lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4974       count = 0;
4975 
4976       for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4977 	{
4978 	  HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4979 
4980 	  if (*sparseness == 2 || thisval <= lastval)
4981 	    *sparseness = 2;
4982 	  else if (thisval != minval + count)
4983 	    *sparseness = 1;
4984 
4985 	  lastval = thisval;
4986 	  count++;
4987 	}
4988     }
4989 
4990   return count;
4991 }
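
/* Illustration (examples, not GCC source) of the SPARSENESS contract
   documented above as it applies to the ENUMERAL_TYPE arm:

     enum dense { A, B, C };	      values 0,1,2:	     *sparseness 0
     enum gappy { D = 0, E = 4 };     monotonic with a gap:  *sparseness 1
     enum dup   { F = 2, G = 2 };     not increasing:	     *sparseness 2

   In each case the return value is the number of enumerators.  */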
4992 
4993 #define BITARRAY_TEST(ARRAY, INDEX) \
4994   ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4995 			  & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4996 #define BITARRAY_SET(ARRAY, INDEX) \
4997   ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4998 			  |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
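
/* Illustration (a usage sketch, not part of GCC) for the macros above.
   With HOST_BITS_PER_CHAR == 8, index 10 selects byte 1, bit 2
   (mask 0x04).  */
#if 0
static void
bitarray_example ()
{
  unsigned char seen[4] = { 0, 0, 0, 0 };

  BITARRAY_SET (seen, 10);	/* seen[1] |= 0x04  */
  if (BITARRAY_TEST (seen, 10))
    ;				/* The bit now reads back as set.  */
}
#endif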
4999 
5000 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5001    with the case values we have seen, assuming the case expression
5002    has the given TYPE.
5003    SPARSENESS is as determined by all_cases_count.
5004 
5005    The time needed is proportional to COUNT, unless
5006    SPARSENESS is 2, in which case quadratic time is needed.  */
5007 
5008 void
5009 mark_seen_cases (type, cases_seen, count, sparseness)
5010      tree type;
5011      unsigned char *cases_seen;
5012      HOST_WIDE_INT count;
5013      int sparseness;
5014 {
5015   tree next_node_to_try = NULL_TREE;
5016   HOST_WIDE_INT next_node_offset = 0;
5017 
5018   struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5019   tree val = make_node (INTEGER_CST);
5020 
5021   TREE_TYPE (val) = type;
5022   if (! root)
5023     /* Do nothing.  */
5024     ;
5025   else if (sparseness == 2)
5026     {
5027       tree t;
5028       unsigned HOST_WIDE_INT xlo;
5029 
5030       /* This less efficient loop is only needed to handle
5031 	 duplicate case values (multiple enum constants
5032 	 with the same value).  */
5033       TREE_TYPE (val) = TREE_TYPE (root->low);
5034       for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5035 	   t = TREE_CHAIN (t), xlo++)
5036 	{
5037 	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5038 	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5039 	  n = root;
5040 	  do
5041 	    {
5042 	      /* Keep going past elements distinctly greater than VAL.  */
5043 	      if (tree_int_cst_lt (val, n->low))
5044 		n = n->left;
5045 
5046 	      /* or distinctly less than VAL.  */
5047 	      else if (tree_int_cst_lt (n->high, val))
5048 		n = n->right;
5049 
5050 	      else
5051 		{
5052 		  /* We have found a matching range.  */
5053 		  BITARRAY_SET (cases_seen, xlo);
5054 		  break;
5055 		}
5056 	    }
5057 	  while (n);
5058 	}
5059     }
5060   else
5061     {
5062       if (root->left)
5063 	case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5064 
5065       for (n = root; n; n = n->right)
5066 	{
5067 	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5068 	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5069 	  while (! tree_int_cst_lt (n->high, val))
5070 	    {
5071 	      /* Calculate (into xlo) the "offset" of the integer (val).
5072 		 The element with lowest value has offset 0, the next smallest
5073 		 element has offset 1, etc.  */
5074 
5075 	      unsigned HOST_WIDE_INT xlo;
5076 	      HOST_WIDE_INT xhi;
5077 	      tree t;
5078 
5079 	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5080 		{
5081 		  /* The TYPE_VALUES will be in increasing order, so
5082 		     start searching where we last ended.  */
5083 		  t = next_node_to_try;
5084 		  xlo = next_node_offset;
5085 		  xhi = 0;
5086 		  for (;;)
5087 		    {
5088 		      if (t == NULL_TREE)
5089 			{
5090 			  t = TYPE_VALUES (type);
5091 			  xlo = 0;
5092 			}
5093 		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
5094 			{
5095 			  next_node_to_try = TREE_CHAIN (t);
5096 			  next_node_offset = xlo + 1;
5097 			  break;
5098 			}
5099 		      xlo++;
5100 		      t = TREE_CHAIN (t);
5101 		      if (t == next_node_to_try)
5102 			{
5103 			  xlo = -1;
5104 			  break;
5105 			}
5106 		    }
5107 		}
5108 	      else
5109 		{
5110 		  t = TYPE_MIN_VALUE (type);
5111 		  if (t)
5112 		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5113 				&xlo, &xhi);
5114 		  else
5115 		    xlo = xhi = 0;
5116 		  add_double (xlo, xhi,
5117 			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5118 			      &xlo, &xhi);
5119 		}
5120 
5121 	      if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5122 		BITARRAY_SET (cases_seen, xlo);
5123 
5124 	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5125 			  1, 0,
5126 			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5127 	    }
5128 	}
5129     }
5130 }
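
/* Illustration (a worked instance, not additional code) of the offset
   arithmetic above for the non-enumeral path: with TYPE_MIN_VALUE == -2
   and val == 1, neg_double yields 2 and add_double then produces
   xlo == 3, i.e. val is the fourth-lowest value of the type, so bit 3
   of CASES_SEEN is set.  */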
5131 
5132 /* Given a switch statement with an expression that is an enumeration
5133    type, warn if any of the enumeration type's literals are not
5134    covered by the case expressions of the switch.  Also, warn if there
5135    are any extra switch cases that are *not* elements of the
5136    enumerated type.
5137 
5138    Historical note:
5139 
5140    At one stage this function would: ``If all enumeration literals
5141    were covered by the case expressions, turn one of the expressions
5142    into the default expression since it should not be possible to fall
5143    through such a switch.''
5144 
5145    That code has since been removed as: ``This optimization is
5146    disabled because it causes valid programs to fail.  ANSI C does not
5147    guarantee that an expression with enum type will have a value that
5148    is the same as one of the enumeration literals.''  */
5149 
5150 void
5151 check_for_full_enumeration_handling (type)
5152      tree type;
5153 {
5154   struct case_node *n;
5155   tree chain;
5156 
5157   /* True iff the selector type is a numbered set mode.  */
5158   int sparseness = 0;
5159 
5160   /* The number of possible selector values.  */
5161   HOST_WIDE_INT size;
5162 
5163   /* For each possible selector value, a one iff it has been matched
5164      by a case value alternative.  */
5165   unsigned char *cases_seen;
5166 
5167   /* The allocated size of cases_seen, in chars.  */
5168   HOST_WIDE_INT bytes_needed;
5169 
5170   size = all_cases_count (type, &sparseness);
5171   bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5172 
5173   if (size > 0 && size < 600000
5174       /* We deliberately use calloc here, not xcalloc, so that we can suppress
5175 	 this optimization if we don't have enough memory rather than
5176 	 aborting, as xmalloc would do.  */
5177       && (cases_seen =
5178 	  (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5179     {
5180       HOST_WIDE_INT i;
5181       tree v = TYPE_VALUES (type);
5182 
5183       /* The time complexity of this code is normally O(N), where
5184 	 N is the number of members in the enumerated type.
5185 	 However, if type is an ENUMERAL_TYPE whose values do not
5186 	 increase monotonically, O(N*log(N)) time may be needed.  */
5187 
5188       mark_seen_cases (type, cases_seen, size, sparseness);
5189 
5190       for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5191 	if (BITARRAY_TEST (cases_seen, i) == 0)
5192 	  warning ("enumeration value `%s' not handled in switch",
5193 		   IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5194 
5195       free (cases_seen);
5196     }
5197 
5198   /* Now we go the other way around; we warn if there are case
5199      expressions that don't correspond to enumerators.  This can
5200      occur since C and C++ don't enforce type-checking of
5201      assignments to enumeration variables.  */
5202 
5203   if (case_stack->data.case_stmt.case_list
5204       && case_stack->data.case_stmt.case_list->left)
5205     case_stack->data.case_stmt.case_list
5206       = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5207   for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5208     {
5209       for (chain = TYPE_VALUES (type);
5210 	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5211 	   chain = TREE_CHAIN (chain))
5212 	;
5213 
5214       if (!chain)
5215 	{
5216 	  if (TYPE_NAME (type) == 0)
5217 	    warning ("case value `%ld' not in enumerated type",
5218 		     (long) TREE_INT_CST_LOW (n->low));
5219 	  else
5220 	    warning ("case value `%ld' not in enumerated type `%s'",
5221 		     (long) TREE_INT_CST_LOW (n->low),
5222 		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5223 					  == IDENTIFIER_NODE)
5224 					 ? TYPE_NAME (type)
5225 					 : DECL_NAME (TYPE_NAME (type))));
5226 	}
5227       if (!tree_int_cst_equal (n->low, n->high))
5228 	{
5229 	  for (chain = TYPE_VALUES (type);
5230 	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5231 	       chain = TREE_CHAIN (chain))
5232 	    ;
5233 
5234 	  if (!chain)
5235 	    {
5236 	      if (TYPE_NAME (type) == 0)
5237 		warning ("case value `%ld' not in enumerated type",
5238 			 (long) TREE_INT_CST_LOW (n->high));
5239 	      else
5240 		warning ("case value `%ld' not in enumerated type `%s'",
5241 			 (long) TREE_INT_CST_LOW (n->high),
5242 			 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5243 					      == IDENTIFIER_NODE)
5244 					     ? TYPE_NAME (type)
5245 					     : DECL_NAME (TYPE_NAME (type))));
5246 	    }
5247 	}
5248     }
5249 }
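
/* Illustration (source that is not part of GCC) triggering both
   directions of the check above when compiled with -Wswitch and no
   default label: "enumeration value `GREEN' not handled in switch" for
   the missing literal, and "case value `3' not in enumerated type
   `color'" for the stray case.  */
#if 0
enum color { RED, GREEN, BLUE };

static int
classify (c)
     enum color c;
{
  switch (c)
    {
    case RED:  return 0;
    case BLUE: return 1;
    case 3:    return 2;	/* Not an enumerator of `color'.  */
    }
  return -1;
}
#endif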
5250 
5251 
5252 
5253 /* Terminate a case (Pascal) or switch (C) statement
5254    in which ORIG_INDEX is the expression to be tested.
5255    If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5256    type as given in the source before any compiler conversions.
5257    Generate the code to test it and jump to the right place.  */
5258 
5259 void
5260 expand_end_case_type (orig_index, orig_type)
5261      tree orig_index, orig_type;
5262 {
5263   tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5264   rtx default_label = 0;
5265   struct case_node *n;
5266   unsigned int count;
5267   rtx index;
5268   rtx table_label;
5269   int ncases;
5270   rtx *labelvec;
5271   int i;
5272   rtx before_case, end;
5273   struct nesting *thiscase = case_stack;
5274   tree index_expr, index_type;
5275   int unsignedp;
5276 
5277   /* Don't crash due to previous errors.  */
5278   if (thiscase == NULL)
5279     return;
5280 
5281   table_label = gen_label_rtx ();
5282   index_expr = thiscase->data.case_stmt.index_expr;
5283   index_type = TREE_TYPE (index_expr);
5284   unsignedp = TREE_UNSIGNED (index_type);
5285   if (orig_type == NULL)
5286     orig_type = TREE_TYPE (orig_index);
5287 
5288   do_pending_stack_adjust ();
5289 
5290   /* This might get a spurious warning in the presence of a syntax error;
5291      it could be fixed by moving the call to check_seenlabel after the
5292      check for error_mark_node, and copying the code of check_seenlabel that
5293      deals with case_stack->data.case_stmt.line_number_status /
5294      restore_line_number_status in front of the call to end_cleanup_deferral.
5295      However, this might miss some useful warnings in the presence of
5296      non-syntax errors.  */
5297   check_seenlabel ();
5298 
5299   /* An ERROR_MARK occurs for various reasons including invalid data type.  */
5300   if (index_type != error_mark_node)
5301     {
5302       /* If the switch expression was an enumerated type, check that
5303 	 exactly all enumeration literals are covered by the cases.
5304 	 The check is made when -Wswitch was specified and there is no
5305 	 default case, or when -Wswitch-enum was specified.  */
5306       if (((warn_switch && !thiscase->data.case_stmt.default_label)
5307 	   || warn_switch_enum)
5308 	  && TREE_CODE (orig_type) == ENUMERAL_TYPE
5309 	  && TREE_CODE (index_expr) != INTEGER_CST)
5310 	check_for_full_enumeration_handling (orig_type);
5311 
5312       if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5313 	warning ("switch missing default case");
5314 
5315       /* If we don't have a default-label, create one here,
5316 	 after the body of the switch.  */
5317       if (thiscase->data.case_stmt.default_label == 0)
5318 	{
5319 	  thiscase->data.case_stmt.default_label
5320 	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5321 	  expand_label (thiscase->data.case_stmt.default_label);
5322 	}
5323       default_label = label_rtx (thiscase->data.case_stmt.default_label);
5324 
5325       before_case = get_last_insn ();
5326 
5327       if (thiscase->data.case_stmt.case_list
5328 	  && thiscase->data.case_stmt.case_list->left)
5329 	thiscase->data.case_stmt.case_list
5330 	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5331 
5332       /* Simplify the case-list before we count it.  */
5333       group_case_nodes (thiscase->data.case_stmt.case_list);
5334 
5335       /* Get upper and lower bounds of case values.
5336 	 Also convert all the case values to the index expr's data type.  */
5337 
5338       count = 0;
5339       for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5340 	{
5341 	  /* Check low and high label values are integers.  */
5342 	  if (TREE_CODE (n->low) != INTEGER_CST)
5343 	    abort ();
5344 	  if (TREE_CODE (n->high) != INTEGER_CST)
5345 	    abort ();
5346 
5347 	  n->low = convert (index_type, n->low);
5348 	  n->high = convert (index_type, n->high);
5349 
5350 	  /* Count the elements and track the largest and smallest
5351 	     of them (treating them as signed even if they are not).  */
5352 	  if (count++ == 0)
5353 	    {
5354 	      minval = n->low;
5355 	      maxval = n->high;
5356 	    }
5357 	  else
5358 	    {
5359 	      if (INT_CST_LT (n->low, minval))
5360 		minval = n->low;
5361 	      if (INT_CST_LT (maxval, n->high))
5362 		maxval = n->high;
5363 	    }
5364 	  /* A range counts double, since it requires two compares.  */
5365 	  if (! tree_int_cst_equal (n->low, n->high))
5366 	    count++;
5367 	}
5368 
5369       /* Compute span of values.  */
5370       if (count != 0)
5371 	range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5372 
5373       end_cleanup_deferral ();
5374 
5375       if (count == 0)
5376 	{
5377 	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5378 	  emit_queue ();
5379 	  emit_jump (default_label);
5380 	}
5381 
5382       /* If range of values is much bigger than number of values,
5383 	 make a sequence of conditional branches instead of a dispatch.
5384 	 If the switch-index is a constant, do it this way
5385 	 because we can optimize it.  */
5386 
5387       else if (count < case_values_threshold ()
5388 	       || compare_tree_int (range, 10 * count) > 0
5389 	       /* RANGE may be signed, and really large ranges will show up
5390 		  as negative numbers.  */
5391 	       || compare_tree_int (range, 0) < 0
5392 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5393 	       || flag_pic
5394 #endif
5395 	       || TREE_CODE (index_expr) == INTEGER_CST
5396 	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
5397 		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5398 	{
5399 	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5400 
5401 	  /* If the index is a short or char for which we do not have
5402 	     an insn to handle comparisons directly, convert it to
5403 	     a full integer now, rather than letting each comparison
5404 	     generate the conversion.  */
5405 
5406 	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5407 	      && ! have_insn_for (COMPARE, GET_MODE (index)))
5408 	    {
5409 	      enum machine_mode wider_mode;
5410 	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5411 		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5412 		if (have_insn_for (COMPARE, wider_mode))
5413 		  {
5414 		    index = convert_to_mode (wider_mode, index, unsignedp);
5415 		    break;
5416 		  }
5417 	    }
5418 
5419 	  emit_queue ();
5420 	  do_pending_stack_adjust ();
5421 
5422 	  index = protect_from_queue (index, 0);
5423 	  if (GET_CODE (index) == MEM)
5424 	    index = copy_to_reg (index);
5425 	  if (GET_CODE (index) == CONST_INT
5426 	      || TREE_CODE (index_expr) == INTEGER_CST)
5427 	    {
5428 	      /* Make a tree node with the proper constant value
5429 		 if we don't already have one.  */
5430 	      if (TREE_CODE (index_expr) != INTEGER_CST)
5431 		{
5432 		  index_expr
5433 		    = build_int_2 (INTVAL (index),
5434 				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5435 		  index_expr = convert (index_type, index_expr);
5436 		}
5437 
5438 	      /* For constant index expressions we need only
5439 		 issue an unconditional branch to the appropriate
5440 		 target code.  The job of removing any unreachable
5441 		 code is left to the optimization phase if the
5442 		 "-O" option is specified.  */
5443 	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5444 		if (! tree_int_cst_lt (index_expr, n->low)
5445 		    && ! tree_int_cst_lt (n->high, index_expr))
5446 		  break;
5447 
5448 	      if (n)
5449 		emit_jump (label_rtx (n->code_label));
5450 	      else
5451 		emit_jump (default_label);
5452 	    }
5453 	  else
5454 	    {
5455 	      /* If the index expression is not constant we generate
5456 		 a binary decision tree to select the appropriate
5457 		 target code.  This is done as follows:
5458 
5459 		 The list of cases is rearranged into a binary tree,
5460 		 nearly optimal assuming equal probability for each case.
5461 
5462 		 The tree is transformed into RTL, eliminating
5463 		 redundant test conditions at the same time.
5464 
5465 		 If program flow could reach the end of the
5466 		 decision tree an unconditional jump to the
5467 		 default code is emitted.  */
5468 
5469 	      use_cost_table
5470 		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
5471 		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
5472 	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5473 	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5474 			       default_label, index_type);
5475 	      emit_jump_if_reachable (default_label);
5476 	    }
5477 	}
5478       else
5479 	{
5480 	  if (! try_casesi (index_type, index_expr, minval, range,
5481 			    table_label, default_label))
5482 	    {
5483 	      index_type = thiscase->data.case_stmt.nominal_type;
5484 
5485 	      /* Index jumptables from zero for suitable values of
5486                  minval to avoid a subtraction.  */
5487 	      if (! optimize_size
5488 		  && compare_tree_int (minval, 0) > 0
5489 		  && compare_tree_int (minval, 3) < 0)
5490 		{
5491 		  minval = integer_zero_node;
5492 		  range = maxval;
5493 		}
5494 
5495 	      if (! try_tablejump (index_type, index_expr, minval, range,
5496 				   table_label, default_label))
5497 		abort ();
5498 	    }
5499 
5500 	  /* Get table of labels to jump to, in order of case index.  */
5501 
5502 	  ncases = tree_low_cst (range, 0) + 1;
5503 	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5504 	  memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5505 
5506 	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5507 	    {
5508 	      /* Compute the low and high bounds relative to the minimum
5509 		 value since that should fit in a HOST_WIDE_INT while the
5510 		 actual values may not.  */
5511 	      HOST_WIDE_INT i_low
5512 		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5513 					     n->low, minval)), 1);
5514 	      HOST_WIDE_INT i_high
5515 		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5516 					     n->high, minval)), 1);
5517 	      HOST_WIDE_INT i;
5518 
5519 	      for (i = i_low; i <= i_high; i ++)
5520 		labelvec[i]
5521 		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5522 	    }
5523 
5524 	  /* Fill in the gaps with the default.  */
5525 	  for (i = 0; i < ncases; i++)
5526 	    if (labelvec[i] == 0)
5527 	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5528 
5529 	  /* Output the table.  */
5530 	  emit_label (table_label);
5531 
5532 	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5533 	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5534 						   gen_rtx_LABEL_REF (Pmode, table_label),
5535 						   gen_rtvec_v (ncases, labelvec),
5536 						   const0_rtx, const0_rtx));
5537 	  else
5538 	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5539 					      gen_rtvec_v (ncases, labelvec)));
5540 
5541 	  /* If the case insn drops through the table,
5542 	     after the table we must jump to the default-label.
5543 	     Otherwise record no drop-through after the table.  */
5544 #ifdef CASE_DROPS_THROUGH
5545 	  emit_jump (default_label);
5546 #else
5547 	  emit_barrier ();
5548 #endif
5549 	}
5550 
5551       before_case = NEXT_INSN (before_case);
5552       end = get_last_insn ();
5553       if (squeeze_notes (&before_case, &end))
5554 	abort ();
5555       reorder_insns (before_case, end,
5556 		     thiscase->data.case_stmt.start);
5557     }
5558   else
5559     end_cleanup_deferral ();
5560 
5561   if (thiscase->exit_label)
5562     emit_label (thiscase->exit_label);
5563 
5564   POPSTACK (case_stack);
5565 
5566   free_temp_slots ();
5567 }
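
/* Illustration (a condensed restatement under stated assumptions, not
   GCC code) of the dispatch heuristic above; the special cases
   (constant index, and -fpic without ASM_OUTPUT_ADDR_DIFF_ELT) are
   ignored here.  A branch table is used only when there are enough
   labels and they are dense: e.g. count == 5 labels spanning
   range == 100 fails 100 <= 10 * 5 and gets a compare/branch tree
   instead.  */
#if 0
static int
use_jump_table_p (count, range)
     unsigned int count;
     HOST_WIDE_INT range;	/* maxval - minval; < 0 on overflow.  */
{
  return (count >= case_values_threshold ()
	  && range >= 0
	  && range <= 10 * (HOST_WIDE_INT) count);
}
#endif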
5568 
5569 /* Convert the tree NODE into a list linked by the right field, with the left
5570    field zeroed.  RIGHT is used for recursion; it is a list to be placed
5571    rightmost in the resulting list.  */
5572 
5573 static struct case_node *
5574 case_tree2list (node, right)
5575      struct case_node *node, *right;
5576 {
5577   struct case_node *left;
5578 
5579   if (node->right)
5580     right = case_tree2list (node->right, right);
5581 
5582   node->right = right;
5583   if ((left = node->left))
5584     {
5585       node->left = 0;
5586       return case_tree2list (left, node);
5587     }
5588 
5589   return node;
5590 }
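
/* Illustration (a worked trace, not additional code).  For the tree

	  2
	 / \
	1   3

   case_tree2list (root, 0) first flattens the right subtree, making 3
   the RIGHT list; 2 is prepended to it, and the left child 1 is then
   flattened with `2 -> 3' as its tail, yielding 1 -> 2 -> 3 with all
   left fields zeroed.  */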
5591 
5592 /* Generate code to jump to LABEL if OP1 and OP2 are equal.  */
5593 
5594 static void
5595 do_jump_if_equal (op1, op2, label, unsignedp)
5596      rtx op1, op2, label;
5597      int unsignedp;
5598 {
5599   if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5600     {
5601       if (INTVAL (op1) == INTVAL (op2))
5602 	emit_jump (label);
5603     }
5604   else
5605     emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5606 			     (GET_MODE (op1) == VOIDmode
5607 			     ? GET_MODE (op2) : GET_MODE (op1)),
5608 			     unsignedp, label);
5609 }
5610 
5611 /* Not all case values are encountered equally.  This function
5612    uses a heuristic to weight case labels, in cases where that
5613    looks like a reasonable thing to do.
5614 
5615    Right now, all we try to guess is text, and we establish the
5616    following weights:
5617 
5618 	chars above space:	16
5619 	digits:			16
5620 	default:		12
5621 	space, punct:		8
5622 	tab:			4
5623 	newline:		2
5624 	other "\" chars:	1
5625 	remaining chars:	0
5626 
5627    If we find any cases in the switch that are not either -1 or in the range
5628    of valid ASCII characters, or are control characters other than those
5629    commonly used with "\", don't treat this switch as scanning text.
5630 
5631    Return 1 if these nodes are suitable for cost estimation, otherwise
5632    return 0.  */
5633 
5634 static int
5635 estimate_case_costs (node)
5636      case_node_ptr node;
5637 {
5638   tree min_ascii = integer_minus_one_node;
5639   tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5640   case_node_ptr n;
5641   int i;
5642 
5643   /* If we haven't already made the cost table, make it now.  Note that the
5644      lower bound of the table is -1, not zero.  */
5645 
5646   if (! cost_table_initialized)
5647     {
5648       cost_table_initialized = 1;
5649 
5650       for (i = 0; i < 128; i++)
5651 	{
5652 	  if (ISALNUM (i))
5653 	    COST_TABLE (i) = 16;
5654 	  else if (ISPUNCT (i))
5655 	    COST_TABLE (i) = 8;
5656 	  else if (ISCNTRL (i))
5657 	    COST_TABLE (i) = -1;
5658 	}
5659 
5660       COST_TABLE (' ') = 8;
5661       COST_TABLE ('\t') = 4;
5662       COST_TABLE ('\0') = 4;
5663       COST_TABLE ('\n') = 2;
5664       COST_TABLE ('\f') = 1;
5665       COST_TABLE ('\v') = 1;
5666       COST_TABLE ('\b') = 1;
5667     }
5668 
5669   /* See if all the case expressions look like text.  It is text if the
5670      constant is >= -1 and the highest constant is <= 127.  Do all comparisons
5671      as signed arithmetic since we don't want to ever access cost_table with a
5672      value less than -1.  Also check that none of the constants in a range
5673      are strange control characters.  */
5674 
5675   for (n = node; n; n = n->right)
5676     {
5677       if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5678 	return 0;
5679 
5680       for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5681 	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5682 	if (COST_TABLE (i) < 0)
5683 	  return 0;
5684     }
5685 
5686   /* All interesting values are within the range of interesting
5687      ASCII characters.  */
5688   return 1;
5689 }
5690 
5691 /* Scan an ordered list of case nodes
5692    combining those with consecutive values or ranges.
5693 
5694    Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
5695 
5696 static void
5697 group_case_nodes (head)
5698      case_node_ptr head;
5699 {
5700   case_node_ptr node = head;
5701 
5702   while (node)
5703     {
5704       rtx lb = next_real_insn (label_rtx (node->code_label));
5705       rtx lb2;
5706       case_node_ptr np = node;
5707 
5708       /* Try to group the successors of NODE with NODE.  */
5709       while (((np = np->right) != 0)
5710 	     /* Do they jump to the same place?  */
5711 	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5712 		 || (lb != 0 && lb2 != 0
5713 		     && simplejump_p (lb)
5714 		     && simplejump_p (lb2)
5715 		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
5716 				     SET_SRC (PATTERN (lb2)))))
5717 	     /* Are their ranges consecutive?  */
5718 	     && tree_int_cst_equal (np->low,
5719 				    fold (build (PLUS_EXPR,
5720 						 TREE_TYPE (node->high),
5721 						 node->high,
5722 						 integer_one_node)))
5723 	     /* An overflow is not consecutive.  */
5724 	     && tree_int_cst_lt (node->high,
5725 				 fold (build (PLUS_EXPR,
5726 					      TREE_TYPE (node->high),
5727 					      node->high,
5728 					      integer_one_node))))
5729 	{
5730 	  node->high = np->high;
5731 	}
5732       /* NP is the first node after NODE which can't be grouped with it.
5733 	 Delete the nodes in between, and move on to that node.  */
5734       node->right = np;
5735       node = np;
5736     }
5737 }
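
/* Illustration (a worked instance, not additional code) of the grouping
   above.  Given the ordered single-value nodes 1:, 2:, 3:, all jumping
   to the same place, the inner loop walks NP across nodes 2 and 3 (same
   target, consecutive values, no overflow), widening node->high each
   time; the chain is then relinked to the single entry 1..3:.  */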
5738 
5739 /* Take an ordered list of case nodes
5740    and transform them into a near optimal binary tree,
5741    on the assumption that any target code selection value is as
5742    likely as any other.
5743 
5744    The transformation is performed by splitting the ordered
5745    list into two equal sections plus a pivot.  The parts are
5746    then attached to the pivot as left and right branches.  Each
5747    branch is then transformed recursively.  */
5748 
5749 static void
5750 balance_case_nodes (head, parent)
5751      case_node_ptr *head;
5752      case_node_ptr parent;
5753 {
5754   case_node_ptr np;
5755 
5756   np = *head;
5757   if (np)
5758     {
5759       int cost = 0;
5760       int i = 0;
5761       int ranges = 0;
5762       case_node_ptr *npp;
5763       case_node_ptr left;
5764 
5765       /* Count the number of entries on branch.  Also count the ranges.  */
5766 
5767       while (np)
5768 	{
5769 	  if (!tree_int_cst_equal (np->low, np->high))
5770 	    {
5771 	      ranges++;
5772 	      if (use_cost_table)
5773 		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5774 	    }
5775 
5776 	  if (use_cost_table)
5777 	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5778 
5779 	  i++;
5780 	  np = np->right;
5781 	}
5782 
5783       if (i > 2)
5784 	{
5785 	  /* Split this list if it is long enough for that to help.  */
5786 	  npp = head;
5787 	  left = *npp;
5788 	  if (use_cost_table)
5789 	    {
5790 	      /* Find the place in the list that bisects the list's total cost;
5791 		 here I gets half the total cost.  */
5792 	      int n_moved = 0;
5793 	      i = (cost + 1) / 2;
5794 	      while (1)
5795 		{
5796 		  /* Skip nodes while their cost does not reach that amount.  */
5797 		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5798 		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5799 		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5800 		  if (i <= 0)
5801 		    break;
5802 		  npp = &(*npp)->right;
5803 		  n_moved += 1;
5804 		}
5805 	      if (n_moved == 0)
5806 		{
5807 		  /* Leave this branch lopsided, but optimize left-hand
5808 		     side and fill in `parent' fields for right-hand side.  */
5809 		  np = *head;
5810 		  np->parent = parent;
5811 		  balance_case_nodes (&np->left, np);
5812 		  for (; np->right; np = np->right)
5813 		    np->right->parent = np;
5814 		  return;
5815 		}
5816 	    }
5817 	  /* If there are just three nodes, split at the middle one.  */
5818 	  else if (i == 3)
5819 	    npp = &(*npp)->right;
5820 	  else
5821 	    {
5822 	      /* Find the place in the list that bisects the list's total cost,
5823 		 where ranges count as 2.
5824 		 Here I gets half the total cost.  */
5825 	      i = (i + ranges + 1) / 2;
5826 	      while (1)
5827 		{
5828 		  /* Skip nodes while their cost does not reach that amount.  */
5829 		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5830 		    i--;
5831 		  i--;
5832 		  if (i <= 0)
5833 		    break;
5834 		  npp = &(*npp)->right;
5835 		}
5836 	    }
5837 	  *head = np = *npp;
5838 	  *npp = 0;
5839 	  np->parent = parent;
5840 	  np->left = left;
5841 
5842 	  /* Optimize each of the two split parts.  */
5843 	  balance_case_nodes (&np->left, np);
5844 	  balance_case_nodes (&np->right, np);
5845 	}
5846       else
5847 	{
5848 	  /* Else leave this branch as one level,
5849 	     but fill in `parent' fields.  */
5850 	  np = *head;
5851 	  np->parent = parent;
5852 	  for (; np->right; np = np->right)
5853 	    np->right->parent = np;
5854 	}
5855     }
5856 }
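
/* Illustration (a worked trace, not additional code) of the
   non-cost-table split above.  For five equal single-value nodes
   1 -> 2 -> 3 -> 4 -> 5, the pivot countdown starts at
   i = (5 + 0 + 1) / 2 = 3 and reaches zero at node 3, which becomes
   the subtree root with `1 -> 2' as its left list and `4 -> 5' as its
   right list; both halves are then balanced recursively.  */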
5857 
5858 /* Search the parent sections of the case node tree
5859    to see if a test for the lower bound of NODE would be redundant.
5860    INDEX_TYPE is the type of the index expression.
5861 
5862    The instructions to generate the case decision tree are
5863    output in the same order as nodes are processed, so it is
5864    known that if a parent node checks the range of the current
5865    node minus one, the current node is bounded at its lower
5866    span.  Thus the test would be redundant.  */
5867 
5868 static int
5869 node_has_low_bound (node, index_type)
5870      case_node_ptr node;
5871      tree index_type;
5872 {
5873   tree low_minus_one;
5874   case_node_ptr pnode;
5875 
5876   /* If the lower bound of this node is the lowest value in the index type,
5877      we need not test it.  */
5878 
5879   if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5880     return 1;
5881 
5882   /* If this node has a left branch, the value at the left must be less
5883      than that at this node, so it cannot be bounded at the bottom and
5884      we need not bother testing any further.  */
5885 
5886   if (node->left)
5887     return 0;
5888 
5889   low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5890 			       node->low, integer_one_node));
5891 
5892   /* If the subtraction above overflowed, we can't verify anything.
5893      Otherwise, look for a parent that tests our value - 1.  */
5894 
5895   if (! tree_int_cst_lt (low_minus_one, node->low))
5896     return 0;
5897 
5898   for (pnode = node->parent; pnode; pnode = pnode->parent)
5899     if (tree_int_cst_equal (low_minus_one, pnode->high))
5900       return 1;
5901 
5902   return 0;
5903 }
5904 
5905 /* Search the parent sections of the case node tree
5906    to see if a test for the upper bound of NODE would be redundant.
5907    INDEX_TYPE is the type of the index expression.
5908 
5909    The instructions to generate the case decision tree are
5910    output in the same order as nodes are processed, so it is
5911    known that if a parent node checks the range of the current
5912    node plus one, the current node is bounded at its upper
5913    span.  Thus the test would be redundant.  */
5914 
5915 static int
5916 node_has_high_bound (node, index_type)
5917      case_node_ptr node;
5918      tree index_type;
5919 {
5920   tree high_plus_one;
5921   case_node_ptr pnode;
5922 
5923   /* If there is no upper bound, obviously no test is needed.  */
5924 
5925   if (TYPE_MAX_VALUE (index_type) == NULL)
5926     return 1;
5927 
5928   /* If the upper bound of this node is the highest value in the type
5929      of the index expression, we need not test against it.  */
5930 
5931   if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5932     return 1;
5933 
5934   /* If this node has a right branch, the value at the right must be greater
5935      than that at this node, so it cannot be bounded at the top and
5936      we need not bother testing any further.  */
5937 
5938   if (node->right)
5939     return 0;
5940 
5941   high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5942 			       node->high, integer_one_node));
5943 
5944   /* If the addition above overflowed, we can't verify anything.
5945      Otherwise, look for a parent that tests our value + 1.  */
5946 
5947   if (! tree_int_cst_lt (node->high, high_plus_one))
5948     return 0;
5949 
5950   for (pnode = node->parent; pnode; pnode = pnode->parent)
5951     if (tree_int_cst_equal (high_plus_one, pnode->low))
5952       return 1;
5953 
5954   return 0;
5955 }
5956 
5957 /* Search the parent sections of the
5958    case node tree to see if both tests for the upper and lower
5959    bounds of NODE would be redundant.  */
5960 
5961 static int
5962 node_is_bounded (node, index_type)
5963      case_node_ptr node;
5964      tree index_type;
5965 {
5966   return (node_has_low_bound (node, index_type)
5967 	  && node_has_high_bound (node, index_type));
5968 }
5969 
5970 /*  Emit an unconditional jump to LABEL unless it would be dead code.  */
5971 
5972 static void
5973 emit_jump_if_reachable (label)
5974      rtx label;
5975 {
5976   if (GET_CODE (get_last_insn ()) != BARRIER)
5977     emit_jump (label);
5978 }
5979 
5980 /* Emit step-by-step code to select a case for the value of INDEX.
5981    The thus generated decision tree follows the form of the
5982    case-node binary tree NODE, whose nodes represent test conditions.
5983    INDEX_TYPE is the type of the index of the switch.
5984 
5985    Care is taken to prune redundant tests from the decision tree
5986    by detecting any boundary conditions already checked by
5987    emitted rtx.  (See node_has_high_bound, node_has_low_bound
5988    and node_is_bounded, above.)
5989 
5990    Where the test conditions can be shown to be redundant we emit
5991    an unconditional jump to the target code.  As a further
5992    optimization, the subordinates of a tree node are examined to
5993    check for bounded nodes.  In this case conditional and/or
5994    unconditional jumps as a result of the boundary check for the
5995    current node are arranged to target the subordinates' associated
5996    code for out of bound conditions on the current node.
5997 
5998    We can assume that when control reaches the code generated here,
5999    the index value has already been compared with the parents
6000    of this node, and determined to be on the same side of each parent
6001    as this node is.  Thus, if this node tests for the value 51,
6002    and a parent tested for 52, we don't need to consider
6003    the possibility of a value greater than 51.  If another parent
6004    tests for the value 50, then this node need not test anything.  */
6005 
6006 static void
6007 emit_case_nodes (index, node, default_label, index_type)
6008      rtx index;
6009      case_node_ptr node;
6010      rtx default_label;
6011      tree index_type;
6012 {
6013   /* If INDEX has an unsigned type, we must make unsigned branches.  */
6014   int unsignedp = TREE_UNSIGNED (index_type);
6015   enum machine_mode mode = GET_MODE (index);
6016   enum machine_mode imode = TYPE_MODE (index_type);
6017 
6018   /* See if our parents have already tested everything for us.
6019      If they have, emit an unconditional jump for this node.  */
6020   if (node_is_bounded (node, index_type))
6021     emit_jump (label_rtx (node->code_label));
6022 
6023   else if (tree_int_cst_equal (node->low, node->high))
6024     {
6025       /* Node is single valued.  First see if the index expression matches
6026 	 this node and then check our children, if any.  */
6027 
6028       do_jump_if_equal (index,
6029 			convert_modes (mode, imode,
6030 				       expand_expr (node->low, NULL_RTX,
6031 						    VOIDmode, 0),
6032 				       unsignedp),
6033 			label_rtx (node->code_label), unsignedp);
6034 
6035       if (node->right != 0 && node->left != 0)
6036 	{
6037 	  /* This node has children on both sides.
6038 	     Dispatch to one side or the other
6039 	     by comparing the index value with this node's value.
6040 	     If one subtree is bounded, check that one first,
6041 	     so we can avoid real branches in the tree.  */
6042 
6043 	  if (node_is_bounded (node->right, index_type))
6044 	    {
6045 	      emit_cmp_and_jump_insns (index,
6046 				       convert_modes
6047 				       (mode, imode,
6048 					expand_expr (node->high, NULL_RTX,
6049 						     VOIDmode, 0),
6050 					unsignedp),
6051 				       GT, NULL_RTX, mode, unsignedp,
6052 				       label_rtx (node->right->code_label));
6053 	      emit_case_nodes (index, node->left, default_label, index_type);
6054 	    }
6055 
6056 	  else if (node_is_bounded (node->left, index_type))
6057 	    {
6058 	      emit_cmp_and_jump_insns (index,
6059 				       convert_modes
6060 				       (mode, imode,
6061 					expand_expr (node->high, NULL_RTX,
6062 						     VOIDmode, 0),
6063 					unsignedp),
6064 				       LT, NULL_RTX, mode, unsignedp,
6065 				       label_rtx (node->left->code_label));
6066 	      emit_case_nodes (index, node->right, default_label, index_type);
6067 	    }
6068 
6069 	  else
6070 	    {
6071 	      /* Neither node is bounded.  First distinguish the two sides;
6072 		 then emit the code for one side at a time.  */
6073 
6074 	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6075 
6076 	      /* See if the value is on the right.  */
6077 	      emit_cmp_and_jump_insns (index,
6078 				       convert_modes
6079 				       (mode, imode,
6080 					expand_expr (node->high, NULL_RTX,
6081 						     VOIDmode, 0),
6082 					unsignedp),
6083 				       GT, NULL_RTX, mode, unsignedp,
6084 				       label_rtx (test_label));
6085 
6086 	      /* Value must be on the left.
6087 		 Handle the left-hand subtree.  */
6088 	      emit_case_nodes (index, node->left, default_label, index_type);
6089 	      /* If left-hand subtree does nothing,
6090 		 go to default.  */
6091 	      emit_jump_if_reachable (default_label);
6092 
6093 	      /* Code branches here for the right-hand subtree.  */
6094 	      expand_label (test_label);
6095 	      emit_case_nodes (index, node->right, default_label, index_type);
6096 	    }
6097 	}
6098 
6099       else if (node->right != 0 && node->left == 0)
6100 	{
6101 	  /* Here we have a right child but no left, so we issue a conditional
6102 	     branch to default and process the right child.
6103 
6104 	     Omit the conditional branch to default if it would avoid only one
6105 	     right child; it costs too much space to save so little time.  */
6106 
6107 	  if (node->right->right || node->right->left
6108 	      || !tree_int_cst_equal (node->right->low, node->right->high))
6109 	    {
6110 	      if (!node_has_low_bound (node, index_type))
6111 		{
6112 		  emit_cmp_and_jump_insns (index,
6113 					   convert_modes
6114 					   (mode, imode,
6115 					    expand_expr (node->high, NULL_RTX,
6116 							 VOIDmode, 0),
6117 					    unsignedp),
6118 					   LT, NULL_RTX, mode, unsignedp,
6119 					   default_label);
6120 		}
6121 
6122 	      emit_case_nodes (index, node->right, default_label, index_type);
6123 	    }
6124 	  else
6125 	    /* We cannot process node->right normally
6126 	       since we haven't ruled out the numbers less than
6127 	       this node's value.  So handle node->right explicitly.  */
6128 	    do_jump_if_equal (index,
6129 			      convert_modes
6130 			      (mode, imode,
6131 			       expand_expr (node->right->low, NULL_RTX,
6132 					    VOIDmode, 0),
6133 			       unsignedp),
6134 			      label_rtx (node->right->code_label), unsignedp);
6135 	}
6136 
6137       else if (node->right == 0 && node->left != 0)
6138 	{
6139 	  /* Just one subtree, on the left.  */
6140 	  if (node->left->left || node->left->right
6141 	      || !tree_int_cst_equal (node->left->low, node->left->high))
6142 	    {
6143 	      if (!node_has_high_bound (node, index_type))
6144 		{
6145 		  emit_cmp_and_jump_insns (index,
6146 					   convert_modes
6147 					   (mode, imode,
6148 					    expand_expr (node->high, NULL_RTX,
6149 							 VOIDmode, 0),
6150 					    unsignedp),
6151 					   GT, NULL_RTX, mode, unsignedp,
6152 					   default_label);
6153 		}
6154 
6155 	      emit_case_nodes (index, node->left, default_label, index_type);
6156 	    }
6157 	  else
6158 	    /* We cannot process node->left normally
6159 	       since we haven't ruled out the numbers greater than
6160 	       this node's value.  So handle node->left explicitly.  */
6161 	    do_jump_if_equal (index,
6162 			      convert_modes
6163 			      (mode, imode,
6164 			       expand_expr (node->left->low, NULL_RTX,
6165 					    VOIDmode, 0),
6166 			       unsignedp),
6167 			      label_rtx (node->left->code_label), unsignedp);
6168 	}
6169     }
6170   else
6171     {
6172       /* Node is a range.  These cases are very similar to those for a single
6173 	 value, except that we do not start by testing whether this node
6174 	 is the one to branch to.  */
6175 
6176       if (node->right != 0 && node->left != 0)
6177 	{
6178 	  /* Node has subtrees on both sides.
6179 	     If the right-hand subtree is bounded,
6180 	     test for it first, since we can go straight there.
6181 	     Otherwise, we need to make a branch in the control structure,
6182 	     then handle the two subtrees.  */
6183 	  tree test_label = 0;
6184 
6185 	  if (node_is_bounded (node->right, index_type))
6186 	    /* Right hand node is fully bounded so we can eliminate any
6187 	       testing and branch directly to the target code.  */
6188 	    emit_cmp_and_jump_insns (index,
6189 				     convert_modes
6190 				     (mode, imode,
6191 				      expand_expr (node->high, NULL_RTX,
6192 						   VOIDmode, 0),
6193 				      unsignedp),
6194 				     GT, NULL_RTX, mode, unsignedp,
6195 				     label_rtx (node->right->code_label));
6196 	  else
6197 	    {
6198 	      /* Right hand node requires testing.
6199 		 Branch to a label where we will handle it later.  */
6200 
6201 	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6202 	      emit_cmp_and_jump_insns (index,
6203 				       convert_modes
6204 				       (mode, imode,
6205 					expand_expr (node->high, NULL_RTX,
6206 						     VOIDmode, 0),
6207 					unsignedp),
6208 				       GT, NULL_RTX, mode, unsignedp,
6209 				       label_rtx (test_label));
6210 	    }
6211 
6212 	  /* Value belongs to this node or to the left-hand subtree.  */
6213 
6214 	  emit_cmp_and_jump_insns (index,
6215 				   convert_modes
6216 				   (mode, imode,
6217 				    expand_expr (node->low, NULL_RTX,
6218 						 VOIDmode, 0),
6219 				    unsignedp),
6220 				   GE, NULL_RTX, mode, unsignedp,
6221 				   label_rtx (node->code_label));
6222 
6223 	  /* Handle the left-hand subtree.  */
6224 	  emit_case_nodes (index, node->left, default_label, index_type);
6225 
6226 	  /* If right node had to be handled later, do that now.  */
6227 
6228 	  if (test_label)
6229 	    {
6230 	      /* If the left-hand subtree fell through,
6231 		 don't let it fall into the right-hand subtree.  */
6232 	      emit_jump_if_reachable (default_label);
6233 
6234 	      expand_label (test_label);
6235 	      emit_case_nodes (index, node->right, default_label, index_type);
6236 	    }
6237 	}
6238 
6239       else if (node->right != 0 && node->left == 0)
6240 	{
6241 	  /* Deal with values to the left of this node,
6242 	     if they are possible.  */
6243 	  if (!node_has_low_bound (node, index_type))
6244 	    {
6245 	      emit_cmp_and_jump_insns (index,
6246 				       convert_modes
6247 				       (mode, imode,
6248 					expand_expr (node->low, NULL_RTX,
6249 						     VOIDmode, 0),
6250 					unsignedp),
6251 				       LT, NULL_RTX, mode, unsignedp,
6252 				       default_label);
6253 	    }
6254 
6255 	  /* Value belongs to this node or to the right-hand subtree.  */
6256 
6257 	  emit_cmp_and_jump_insns (index,
6258 				   convert_modes
6259 				   (mode, imode,
6260 				    expand_expr (node->high, NULL_RTX,
6261 						 VOIDmode, 0),
6262 				    unsignedp),
6263 				   LE, NULL_RTX, mode, unsignedp,
6264 				   label_rtx (node->code_label));
6265 
6266 	  emit_case_nodes (index, node->right, default_label, index_type);
6267 	}
6268 
6269       else if (node->right == 0 && node->left != 0)
6270 	{
6271 	  /* Deal with values to the right of this node,
6272 	     if they are possible.  */
6273 	  if (!node_has_high_bound (node, index_type))
6274 	    {
6275 	      emit_cmp_and_jump_insns (index,
6276 				       convert_modes
6277 				       (mode, imode,
6278 					expand_expr (node->high, NULL_RTX,
6279 						     VOIDmode, 0),
6280 					unsignedp),
6281 				       GT, NULL_RTX, mode, unsignedp,
6282 				       default_label);
6283 	    }
6284 
6285 	  /* Value belongs to this node or to the left-hand subtree.  */
6286 
6287 	  emit_cmp_and_jump_insns (index,
6288 				   convert_modes
6289 				   (mode, imode,
6290 				    expand_expr (node->low, NULL_RTX,
6291 						 VOIDmode, 0),
6292 				    unsignedp),
6293 				   GE, NULL_RTX, mode, unsignedp,
6294 				   label_rtx (node->code_label));
6295 
6296 	  emit_case_nodes (index, node->left, default_label, index_type);
6297 	}
6298 
6299       else
6300 	{
6301 	  /* Node has no children so we check low and high bounds to remove
6302 	     redundant tests.  Only one of the bounds can exist,
6303 	     since otherwise this node is bounded--a case tested already.  */
6304 	  int high_bound = node_has_high_bound (node, index_type);
6305 	  int low_bound = node_has_low_bound (node, index_type);
6306 
6307 	  if (!high_bound && low_bound)
6308 	    {
6309 	      emit_cmp_and_jump_insns (index,
6310 				       convert_modes
6311 				       (mode, imode,
6312 					expand_expr (node->high, NULL_RTX,
6313 						     VOIDmode, 0),
6314 					unsignedp),
6315 				       GT, NULL_RTX, mode, unsignedp,
6316 				       default_label);
6317 	    }
6318 
6319 	  else if (!low_bound && high_bound)
6320 	    {
6321 	      emit_cmp_and_jump_insns (index,
6322 				       convert_modes
6323 				       (mode, imode,
6324 					expand_expr (node->low, NULL_RTX,
6325 						     VOIDmode, 0),
6326 					unsignedp),
6327 				       LT, NULL_RTX, mode, unsignedp,
6328 				       default_label);
6329 	    }
6330 	  else if (!low_bound && !high_bound)
6331 	    {
6332 	      /* Widen LOW and HIGH to the same width as INDEX.  */
6333 	      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6334 	      tree low = build1 (CONVERT_EXPR, type, node->low);
6335 	      tree high = build1 (CONVERT_EXPR, type, node->high);
6336 	      rtx low_rtx, new_index, new_bound;
6337 
6338 	      /* Instead of doing two branches, emit one unsigned branch for
6339 		 (index-low) > (high-low).  */
6340 	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6341 	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6342 					       NULL_RTX, unsignedp,
6343 					       OPTAB_WIDEN);
6344 	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6345 						    high, low)),
6346 				       NULL_RTX, mode, 0);
6347 
6348 	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6349 				       mode, 1, default_label);
6350 	    }
6351 
6352 	  emit_jump (label_rtx (node->code_label));
6353 	}
6354     }
6355 }
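
/* Illustration (a standalone sketch, not part of GCC): the childless-node
   case above relies on the classic unsigned range check.  For
   low <= high, `index >= low && index <= high' is equivalent to the
   single unsigned comparison `(index - low) <= (high - low)', because an
   index below LOW wraps to a huge unsigned value.  */
#if 0
static int
in_range (x, low, high)
     unsigned int x, low, high;
{
  /* One compare and branch instead of two.  */
  return x - low <= high - low;
}
#endif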
6356 
6357 #include "gt-stmt.h"
6358