xref: /openbsd/gnu/gcc/gcc/stmt.c (revision 404b540a)
1 /* Expands front end tree to back end RTL for GCC
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3    1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 /* This file handles the generation of rtl code from tree structure
24    above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
25    The functions whose names start with `expand_' are called by the
26    expander to generate RTL instructions for various kinds of constructs.  */
27 
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "tm.h"
32 
33 #include "rtl.h"
34 #include "hard-reg-set.h"
35 #include "tree.h"
36 #include "tm_p.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "insn-config.h"
41 #include "expr.h"
42 #include "libfuncs.h"
43 #include "recog.h"
44 #include "machmode.h"
45 #include "toplev.h"
46 #include "output.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "predict.h"
50 #include "optabs.h"
51 #include "target.h"
52 #include "regs.h"
53 
54 /* Functions and data structures for expanding case statements.  */
55 
56 /* Case label structure, used to hold info on labels within case
57    statements.  We handle "range" labels; for a single-value label
58    as in C, the high and low limits are the same.
59 
60    We start with a vector of case nodes sorted in ascending order, and
61    the default label as the last element in the vector.  Before expanding
62    to RTL, we transform this vector into a list linked via the RIGHT
63    fields in the case_node struct.  Nodes with higher case values are
64    later in the list.
65 
66    Switch statements can be output in three forms.  A branch table is
67    used if there are more than a few labels and the labels are dense
68    within the range between the smallest and largest case value.  If a
69    branch table is used, no further manipulations are done with the case
70    node chain.
71 
72    The alternative to the use of a branch table is to generate a series
73    of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
74    and PARENT fields to hold a binary tree.  Initially the tree is
75    totally unbalanced, with everything on the right.  We balance the tree
76    with nodes on the left having lower case values than the parent
77    and nodes on the right having higher values.  We then output the tree
78    in order.
79 
80    For very small, suitable switch statements, we can generate a series
81    of simple bit test and branches instead.  */
82 
/* One node per case label.  Nodes are chained/treed as described in the
   comment above; LOW == HIGH for an ordinary single-value C case label.  */
struct case_node GTY(())
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
};

/* Convenience typedefs used throughout the case-expansion code.  */
typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
95 
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
/* Nonzero when cost_table_ should be consulted (set elsewhere in this
   file by estimate_case_costs).  */
static int use_cost_table;
/* Nonzero once cost_table_ has been filled in.  */
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  The +1 bias maps index -1 to slot 0.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
106 
/* Forward declarations for the static helpers defined later in this file.  */
static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
					tree, tree, tree);
126 
127 
128 /* Return the rtx-label that corresponds to a LABEL_DECL,
129    creating it if necessary.  */
130 
131 rtx
label_rtx(tree label)132 label_rtx (tree label)
133 {
134   gcc_assert (TREE_CODE (label) == LABEL_DECL);
135 
136   if (!DECL_RTL_SET_P (label))
137     {
138       rtx r = gen_label_rtx ();
139       SET_DECL_RTL (label, r);
140       if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
141 	LABEL_PRESERVE_P (r) = 1;
142     }
143 
144   return DECL_RTL (label);
145 }
146 
147 /* As above, but also put it on the forced-reference list of the
148    function that contains it.  */
149 rtx
force_label_rtx(tree label)150 force_label_rtx (tree label)
151 {
152   rtx ref = label_rtx (label);
153   tree function = decl_function_context (label);
154   struct function *p;
155 
156   gcc_assert (function);
157 
158   if (function != current_function_decl)
159     p = find_function_data (function);
160   else
161     p = cfun;
162 
163   p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
164 						p->expr->x_forced_labels);
165   return ref;
166 }
167 
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  /* Flush any pending stack adjustment first so the jump is taken with
     the stack in a consistent state.  */
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  /* Mark that control cannot fall through past the jump.  */
  emit_barrier ();
}
177 
178 /* Emit code to jump to the address
179    specified by the pointer expression EXP.  */
180 
181 void
expand_computed_goto(tree exp)182 expand_computed_goto (tree exp)
183 {
184   rtx x = expand_normal (exp);
185 
186   x = convert_memory_address (Pmode, x);
187 
188   do_pending_stack_adjust ();
189   emit_indirect_jump (x);
190 }
191 
192 /* Handle goto statements and the labels that they can go to.  */
193 
194 /* Specify the location in the RTL code of a label LABEL,
195    which is a LABEL_DECL tree node.
196 
197    This is used for the kind of label that the user can jump to with a
198    goto statement, and for alternatives of a switch or case statement.
199    RTL labels generated for loops and conditionals don't go through here;
200    they are generated directly at the RTL level, by other functions below.
201 
202    Note that this has nothing to do with defining label *names*.
203    Languages vary in how they do that and what that even means.  */
204 
205 void
expand_label(tree label)206 expand_label (tree label)
207 {
208   rtx label_r = label_rtx (label);
209 
210   do_pending_stack_adjust ();
211   emit_label (label_r);
212   if (DECL_NAME (label))
213     LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
214 
215   if (DECL_NONLOCAL (label))
216     {
217       expand_nl_goto_receiver ();
218       nonlocal_goto_handler_labels
219 	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
220 			     nonlocal_goto_handler_labels);
221     }
222 
223   if (FORCED_LABEL (label))
224     forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
225 
226   if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
227     maybe_set_first_label_num (label_r);
228 }
229 
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  /* label_rtx creates the label's rtx on first use.  */
  emit_jump (label_rtx (label));
}
246 
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
256 
257 /* Generate RTL for an asm statement (explicit assembler code).
258    STRING is a STRING_CST node containing the assembler code text,
259    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
260    insn is volatile; don't optimize it.  */
261 
262 static void
expand_asm(tree string,int vol)263 expand_asm (tree string, int vol)
264 {
265   rtx body;
266 
267   if (TREE_CODE (string) == ADDR_EXPR)
268     string = TREE_OPERAND (string, 0);
269 
270   body = gen_rtx_ASM_INPUT (VOIDmode,
271 			    ggc_strdup (TREE_STRING_POINTER (string)));
272 
273   MEM_VOLATILE_P (body) = vol;
274 
275   emit_insn (body);
276 }
277 
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string, classifying each letter.
     CONSTRAINT_LEN handles multi-character constraints.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	/* Only one `=' or `+' is allowed, and it was handled above.  */
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	/* `%' means commutative with the following operand, so it is
	   meaningless on the last operand.  */
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	/* Modifiers and constant constraints: no register/memory
	   implications for an output.  */
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	/* Non-alphabetic characters are silently ignored here;
	   machine-specific letters are classified via the target's
	   constraint macros.  */
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
423 
/* Similar, but for input constraints.  INPUT_NUM is the index of this
   input among the NINPUTS inputs; NINOUT of the inputs are really
   read-write outputs.  CONSTRAINTS is the full array of constraint
   strings (outputs first), used to resolve matching constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	/* Output-only modifiers; only diagnose them if we are still
	   looking at the original constraint, not a matched one.  */
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	/* Commutativity marker is meaningless on the last input.  */
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  /* A matching constraint must reference an output operand.  */
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      /* Restart the scan over the matched output's constraint.  */
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    /* Skip past the digits strtoul consumed.  */
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	/* Machine-specific letters: classify via the target macros.  */
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
559 
560 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
561    can be an asm-declared register.  Called via walk_tree.  */
562 
563 static tree
decl_overlaps_hard_reg_set_p(tree * declp,int * walk_subtrees ATTRIBUTE_UNUSED,void * data)564 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
565 			      void *data)
566 {
567   tree decl = *declp;
568   const HARD_REG_SET *regs = data;
569 
570   if (TREE_CODE (decl) == VAR_DECL)
571     {
572       if (DECL_HARD_REGISTER (decl)
573 	  && REG_P (DECL_RTL (decl))
574 	  && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
575 	{
576 	  rtx reg = DECL_RTL (decl);
577 	  unsigned int regno;
578 
579 	  for (regno = REGNO (reg);
580 	       regno < (REGNO (reg)
581 			+ hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
582 	       regno++)
583 	    if (TEST_HARD_REG_BIT (*regs, regno))
584 	      return decl;
585 	}
586       walk_subtrees = 0;
587     }
588   else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
589     walk_subtrees = 0;
590   return NULL_TREE;
591 }
592 
593 /* If there is an overlap between *REGS and DECL, return the first overlap
594    found.  */
595 tree
tree_overlaps_hard_reg_set(tree decl,HARD_REG_SET * regs)596 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
597 {
598   return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
599 }
600 
601 /* Check for overlap between registers marked in CLOBBERED_REGS and
602    anything inappropriate in T.  Emit error and return the register
603    variable definition for error, NULL_TREE for ok.  */
604 
605 static bool
tree_conflicts_with_clobbers_p(tree t,HARD_REG_SET * clobbered_regs)606 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
607 {
608   /* Conflicts between asm-declared register variables and the clobber
609      list are not allowed.  */
610   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
611 
612   if (overlap)
613     {
614       error ("asm-specifier for variable %qs conflicts with asm clobber list",
615 	     IDENTIFIER_POINTER (DECL_NAME (overlap)));
616 
617       /* Reset registerness to stop multiple errors emitted for a single
618 	 variable.  */
619       DECL_REGISTER (overlap) = 0;
620       return true;
621     }
622 
623   return false;
624 }
625 
626 /* Generate RTL for an asm statement with arguments.
627    STRING is the instruction template.
628    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
629    Each output or input has an expression in the TREE_VALUE and
630    and a tree list in TREE_PURPOSE which in turn contains a constraint
631    name in TREE_VALUE (or NULL_TREE) and a constraint string
632    in TREE_PURPOSE.
633    CLOBBERS is a list of STRING_CST nodes each naming a hard register
634    that is clobbered by this insn.
635 
636    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
637    Some elements of OUTPUTS may be replaced with trees representing temporary
638    values.  The caller should copy those temporary values to the originally
639    specified lvalues.
640 
641    VOL nonzero means the insn is volatile; don't optimize it.  */
642 
643 static void
expand_asm_operands(tree string,tree outputs,tree inputs,tree clobbers,int vol,location_t locus)644 expand_asm_operands (tree string, tree outputs, tree inputs,
645 		     tree clobbers, int vol, location_t locus)
646 {
647   rtvec argvec, constraintvec;
648   rtx body;
649   int ninputs = list_length (inputs);
650   int noutputs = list_length (outputs);
651   int ninout;
652   int nclobbers;
653   HARD_REG_SET clobbered_regs;
654   int clobber_conflict_found = 0;
655   tree tail;
656   tree t;
657   int i;
658   /* Vector of RTX's of evaluated output operands.  */
659   rtx *output_rtx = alloca (noutputs * sizeof (rtx));
660   int *inout_opnum = alloca (noutputs * sizeof (int));
661   rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
662   enum machine_mode *inout_mode
663     = alloca (noutputs * sizeof (enum machine_mode));
664   const char **constraints
665     = alloca ((noutputs + ninputs) * sizeof (const char *));
666   int old_generating_concat_p = generating_concat_p;
667 
668   /* An ASM with no outputs needs to be treated as volatile, for now.  */
669   if (noutputs == 0)
670     vol = 1;
671 
672   if (! check_operand_nalternatives (outputs, inputs))
673     return;
674 
675   string = resolve_asm_operand_names (string, outputs, inputs);
676 
677   /* Collect constraints.  */
678   i = 0;
679   for (t = outputs; t ; t = TREE_CHAIN (t), i++)
680     constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
681   for (t = inputs; t ; t = TREE_CHAIN (t), i++)
682     constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
683 
684   /* Sometimes we wish to automatically clobber registers across an asm.
685      Case in point is when the i386 backend moved from cc0 to a hard reg --
686      maintaining source-level compatibility means automatically clobbering
687      the flags register.  */
688   clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
689 
690   /* Count the number of meaningful clobbered registers, ignoring what
691      we would ignore later.  */
692   nclobbers = 0;
693   CLEAR_HARD_REG_SET (clobbered_regs);
694   for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
695     {
696       const char *regname;
697 
698       if (TREE_VALUE (tail) == error_mark_node)
699 	return;
700       regname = TREE_STRING_POINTER (TREE_VALUE (tail));
701 
702       i = decode_reg_name (regname);
703       if (i >= 0 || i == -4)
704 	++nclobbers;
705       else if (i == -2)
706 	error ("unknown register name %qs in %<asm%>", regname);
707 
708       /* Mark clobbered registers.  */
709       if (i >= 0)
710         {
711 	  /* Clobbering the PIC register is an error.  */
712 	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
713 	    {
714 	      error ("PIC register %qs clobbered in %<asm%>", regname);
715 	      return;
716 	    }
717 
718 	  SET_HARD_REG_BIT (clobbered_regs, i);
719 	}
720     }
721 
722   /* First pass over inputs and outputs checks validity and sets
723      mark_addressable if needed.  */
724 
725   ninout = 0;
726   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
727     {
728       tree val = TREE_VALUE (tail);
729       tree type = TREE_TYPE (val);
730       const char *constraint;
731       bool is_inout;
732       bool allows_reg;
733       bool allows_mem;
734 
735       /* If there's an erroneous arg, emit no insn.  */
736       if (type == error_mark_node)
737 	return;
738 
739       /* Try to parse the output constraint.  If that fails, there's
740 	 no point in going further.  */
741       constraint = constraints[i];
742       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
743 				    &allows_mem, &allows_reg, &is_inout))
744 	return;
745 
746       if (! allows_reg
747 	  && (allows_mem
748 	      || is_inout
749 	      || (DECL_P (val)
750 		  && REG_P (DECL_RTL (val))
751 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
752 	lang_hooks.mark_addressable (val);
753 
754       if (is_inout)
755 	ninout++;
756     }
757 
758   ninputs += ninout;
759   if (ninputs + noutputs > MAX_RECOG_OPERANDS)
760     {
761       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
762       return;
763     }
764 
765   for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
766     {
767       bool allows_reg, allows_mem;
768       const char *constraint;
769 
770       /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
771 	 would get VOIDmode and that could cause a crash in reload.  */
772       if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
773 	return;
774 
775       constraint = constraints[i + noutputs];
776       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
777 				    constraints, &allows_mem, &allows_reg))
778 	return;
779 
780       if (! allows_reg && allows_mem)
781 	lang_hooks.mark_addressable (TREE_VALUE (tail));
782     }
783 
784   /* Second pass evaluates arguments.  */
785 
786   ninout = 0;
787   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
788     {
789       tree val = TREE_VALUE (tail);
790       tree type = TREE_TYPE (val);
791       bool is_inout;
792       bool allows_reg;
793       bool allows_mem;
794       rtx op;
795       bool ok;
796 
797       ok = parse_output_constraint (&constraints[i], i, ninputs,
798 				    noutputs, &allows_mem, &allows_reg,
799 				    &is_inout);
800       gcc_assert (ok);
801 
802       /* If an output operand is not a decl or indirect ref and our constraint
803 	 allows a register, make a temporary to act as an intermediate.
804 	 Make the asm insn write into that, then our caller will copy it to
805 	 the real output operand.  Likewise for promoted variables.  */
806 
807       generating_concat_p = 0;
808 
809       real_output_rtx[i] = NULL_RTX;
810       if ((TREE_CODE (val) == INDIRECT_REF
811 	   && allows_mem)
812 	  || (DECL_P (val)
813 	      && (allows_mem || REG_P (DECL_RTL (val)))
814 	      && ! (REG_P (DECL_RTL (val))
815 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
816 	  || ! allows_reg
817 	  || is_inout)
818 	{
819 	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
820 	  if (MEM_P (op))
821 	    op = validize_mem (op);
822 
823 	  if (! allows_reg && !MEM_P (op))
824 	    error ("output number %d not directly addressable", i);
825 	  if ((! allows_mem && MEM_P (op))
826 	      || GET_CODE (op) == CONCAT)
827 	    {
828 	      real_output_rtx[i] = op;
829 	      op = gen_reg_rtx (GET_MODE (op));
830 	      if (is_inout)
831 		emit_move_insn (op, real_output_rtx[i]);
832 	    }
833 	}
834       else
835 	{
836 	  op = assign_temp (type, 0, 0, 1);
837 	  op = validize_mem (op);
838 	  TREE_VALUE (tail) = make_tree (type, op);
839 	}
840       output_rtx[i] = op;
841 
842       generating_concat_p = old_generating_concat_p;
843 
844       if (is_inout)
845 	{
846 	  inout_mode[ninout] = TYPE_MODE (type);
847 	  inout_opnum[ninout++] = i;
848 	}
849 
850       if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
851 	clobber_conflict_found = 1;
852     }
853 
854   /* Make vectors for the expression-rtx, constraint strings,
855      and named operands.  */
856 
857   argvec = rtvec_alloc (ninputs);
858   constraintvec = rtvec_alloc (ninputs);
859 
860   body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
861 				: GET_MODE (output_rtx[0])),
862 			       ggc_strdup (TREE_STRING_POINTER (string)),
863 			       empty_string, 0, argvec, constraintvec,
864 			       locus);
865 
866   MEM_VOLATILE_P (body) = vol;
867 
868   /* Eval the inputs and put them into ARGVEC.
869      Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
870 
871   for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
872     {
873       bool allows_reg, allows_mem;
874       const char *constraint;
875       tree val, type;
876       rtx op;
877       bool ok;
878 
879       constraint = constraints[i + noutputs];
880       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
881 				   constraints, &allows_mem, &allows_reg);
882       gcc_assert (ok);
883 
884       generating_concat_p = 0;
885 
886       val = TREE_VALUE (tail);
887       type = TREE_TYPE (val);
888       /* EXPAND_INITIALIZER will not generate code for valid initializer
889 	 constants, but will still generate code for other types of operand.
890 	 This is the behavior we want for constant constraints.  */
891       op = expand_expr (val, NULL_RTX, VOIDmode,
892 			allows_reg ? EXPAND_NORMAL
893 			: allows_mem ? EXPAND_MEMORY
894 			: EXPAND_INITIALIZER);
895 
896       /* Never pass a CONCAT to an ASM.  */
897       if (GET_CODE (op) == CONCAT)
898 	op = force_reg (GET_MODE (op), op);
899       else if (MEM_P (op))
900 	op = validize_mem (op);
901 
902       if (asm_operand_ok (op, constraint) <= 0)
903 	{
904 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
905 	    op = force_reg (TYPE_MODE (type), op);
906 	  else if (!allows_mem)
907 	    warning (0, "asm operand %d probably doesn%'t match constraints",
908 		     i + noutputs);
909 	  else if (MEM_P (op))
910 	    {
911 	      /* We won't recognize either volatile memory or memory
912 		 with a queued address as available a memory_operand
913 		 at this point.  Ignore it: clearly this *is* a memory.  */
914 	    }
915 	  else
916 	    {
917 	      warning (0, "use of memory input without lvalue in "
918 		       "asm operand %d is deprecated", i + noutputs);
919 
920 	      if (CONSTANT_P (op))
921 		{
922 		  rtx mem = force_const_mem (TYPE_MODE (type), op);
923 		  if (mem)
924 		    op = validize_mem (mem);
925 		  else
926 		    op = force_reg (TYPE_MODE (type), op);
927 		}
928 	      if (REG_P (op)
929 		  || GET_CODE (op) == SUBREG
930 		  || GET_CODE (op) == CONCAT)
931 		{
932 		  tree qual_type = build_qualified_type (type,
933 							 (TYPE_QUALS (type)
934 							  | TYPE_QUAL_CONST));
935 		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
936 		  memloc = validize_mem (memloc);
937 		  emit_move_insn (memloc, op);
938 		  op = memloc;
939 		}
940 	    }
941 	}
942 
943       generating_concat_p = old_generating_concat_p;
944       ASM_OPERANDS_INPUT (body, i) = op;
945 
946       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
947 	= gen_rtx_ASM_INPUT (TYPE_MODE (type),
948 			     ggc_strdup (constraints[i + noutputs]));
949 
950       if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
951 	clobber_conflict_found = 1;
952     }
953 
954   /* Protect all the operands from the queue now that they have all been
955      evaluated.  */
956 
957   generating_concat_p = 0;
958 
959   /* For in-out operands, copy output rtx to input rtx.  */
960   for (i = 0; i < ninout; i++)
961     {
962       int j = inout_opnum[i];
963       char buffer[16];
964 
965       ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
966 	= output_rtx[j];
967 
968       sprintf (buffer, "%d", j);
969       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
970 	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
971     }
972 
973   generating_concat_p = old_generating_concat_p;
974 
975   /* Now, for each output, construct an rtx
976      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
977 			       ARGVEC CONSTRAINTS OPNAMES))
978      If there is more than one, put them inside a PARALLEL.  */
979 
980   if (noutputs == 1 && nclobbers == 0)
981     {
982       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
983       emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
984     }
985 
986   else if (noutputs == 0 && nclobbers == 0)
987     {
988       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
989       emit_insn (body);
990     }
991 
992   else
993     {
994       rtx obody = body;
995       int num = noutputs;
996 
997       if (num == 0)
998 	num = 1;
999 
1000       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1001 
1002       /* For each output operand, store a SET.  */
1003       for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1004 	{
1005 	  XVECEXP (body, 0, i)
1006 	    = gen_rtx_SET (VOIDmode,
1007 			   output_rtx[i],
1008 			   gen_rtx_ASM_OPERANDS
1009 			   (GET_MODE (output_rtx[i]),
1010 			    ggc_strdup (TREE_STRING_POINTER (string)),
1011 			    ggc_strdup (constraints[i]),
1012 			    i, argvec, constraintvec, locus));
1013 
1014 	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1015 	}
1016 
1017       /* If there are no outputs (but there are some clobbers)
1018 	 store the bare ASM_OPERANDS into the PARALLEL.  */
1019 
1020       if (i == 0)
1021 	XVECEXP (body, 0, i++) = obody;
1022 
1023       /* Store (clobber REG) for each clobbered register specified.  */
1024 
1025       for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1026 	{
1027 	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1028 	  int j = decode_reg_name (regname);
1029 	  rtx clobbered_reg;
1030 
1031 	  if (j < 0)
1032 	    {
1033 	      if (j == -3)	/* `cc', which is not a register */
1034 		continue;
1035 
1036 	      if (j == -4)	/* `memory', don't cache memory across asm */
1037 		{
1038 		  XVECEXP (body, 0, i++)
1039 		    = gen_rtx_CLOBBER (VOIDmode,
1040 				       gen_rtx_MEM
1041 				       (BLKmode,
1042 					gen_rtx_SCRATCH (VOIDmode)));
1043 		  continue;
1044 		}
1045 
1046 	      /* Ignore unknown register, error already signaled.  */
1047 	      continue;
1048 	    }
1049 
1050 	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1051 	  clobbered_reg = gen_rtx_REG (QImode, j);
1052 
1053 	  /* Do sanity check for overlap between clobbers and respectively
1054 	     input and outputs that hasn't been handled.  Such overlap
1055 	     should have been detected and reported above.  */
1056 	  if (!clobber_conflict_found)
1057 	    {
1058 	      int opno;
1059 
1060 	      /* We test the old body (obody) contents to avoid tripping
1061 		 over the under-construction body.  */
1062 	      for (opno = 0; opno < noutputs; opno++)
1063 		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1064 		  internal_error ("asm clobber conflict with output operand");
1065 
1066 	      for (opno = 0; opno < ninputs - ninout; opno++)
1067 		if (reg_overlap_mentioned_p (clobbered_reg,
1068 					     ASM_OPERANDS_INPUT (obody, opno)))
1069 		  internal_error ("asm clobber conflict with input operand");
1070 	    }
1071 
1072 	  XVECEXP (body, 0, i++)
1073 	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1074 	}
1075 
1076       emit_insn (body);
1077     }
1078 
1079   /* For any outputs that needed reloading into registers, spill them
1080      back to where they belong.  */
1081   for (i = 0; i < noutputs; ++i)
1082     if (real_output_rtx[i])
1083       emit_move_insn (real_output_rtx[i], output_rtx[i]);
1084 
1085   free_temp_slots ();
1086 }
1087 
void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  /* A bare asm ("...") with no operands is expanded directly; there
     are no operands to resolve and no outputs to copy back.  */
  if (ASM_INPUT_P (exp))
    {
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
		       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail));
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }
}
1130 
1131 /* A subroutine of expand_asm_operands.  Check that all operands have
1132    the same number of alternatives.  Return true if so.  */
1133 
1134 static bool
check_operand_nalternatives(tree outputs,tree inputs)1135 check_operand_nalternatives (tree outputs, tree inputs)
1136 {
1137   if (outputs || inputs)
1138     {
1139       tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1140       int nalternatives
1141 	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1142       tree next = inputs;
1143 
1144       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1145 	{
1146 	  error ("too many alternatives in %<asm%>");
1147 	  return false;
1148 	}
1149 
1150       tmp = outputs;
1151       while (tmp)
1152 	{
1153 	  const char *constraint
1154 	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1155 
1156 	  if (n_occurrences (',', constraint) != nalternatives)
1157 	    {
1158 	      error ("operand constraints for %<asm%> differ "
1159 		     "in number of alternatives");
1160 	      return false;
1161 	    }
1162 
1163 	  if (TREE_CHAIN (tmp))
1164 	    tmp = TREE_CHAIN (tmp);
1165 	  else
1166 	    tmp = next, next = 0;
1167 	}
1168     }
1169 
1170   return true;
1171 }
1172 
1173 /* A subroutine of expand_asm_operands.  Check that all operand names
1174    are unique.  Return true if so.  We rely on the fact that these names
1175    are identifiers, and so have been canonicalized by get_identifier,
1176    so all we need are pointer comparisons.  */
1177 
static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  /* Compare each named output against all outputs that follow it.  */
  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  /* Compare each named input against the inputs that follow it, and
     against every output; inputs and outputs share one namespace.  */
  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  /* I still points at the operand whose name was duplicated.  */
  error ("duplicate asm operand name %qs",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}
1215 
1216 /* A subroutine of expand_asm_operands.  Resolve the names of the operands
1217    in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1218    STRING and in the constraints to those numbers.  */
1219 
tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  /* Diagnose duplicate names up front; resolution below proceeds
     regardless and will bind each reference to the first match.  */
  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Work on a writable copy; resolve_operand_name_1 rewrites
	     the buffer in place.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  Only the
     forms "%[name]" and "%<letter>[name]" are subject to substitution;
     any other '%' merely advances the scan.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  /* P now points at the '['; replace "[name]" with the operand
	     number in place and continue after the substitution.  */
	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
1291 
1292 /* A subroutine of resolve_operand_names.  P points to the '[' for a
1293    potential named operand of the form [<name>].  In place, replace
1294    the name and brackets with a number.  Return a pointer to the
1295    balance of the string after substitution.  */
1296 
static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  /* Length of the name between the brackets.  */
  len = q - p - 1;

  /* Resolve the name to a number.  Outputs are numbered first; inputs
     continue the same numbering, matching %0, %1, ... in the template.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  /* No such operand: report it and substitute number 0 so the caller
     can keep scanning the rest of the buffer.  */
  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
1355 
1356 /* Generate RTL to evaluate the expression EXP.  */
1357 
void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  /* Expand for side effects only; const0_rtx as the target signals
     that the computed value itself is unwanted.  */
  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
  type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_normal (TYPE_SIZE (type)),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();
}
1390 
1391 /* Warn if EXP contains any computations whose results are not used.
1392    Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
1393    (potential) location of the expression.  */
1394 
int
warn_if_unused_value (tree exp, location_t locus)
{
 restart:
  /* Already marked used, or explicitly suppressed.  */
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Prefer the expression's own location over the caller-supplied one.  */
  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    /* These codes have side effects or exist for their effect, so their
       value being unused is not suspicious.  */
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
	return 0;

    warn:
      warning (0, "%Hvalue computed is not used", &locus);
      return 1;
    }
}
1485 
1486 
1487 /* Generate RTL to return from the current function, with no value.
1488    (That is, we do not do anything about returning any value.)  */
1489 
void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  /* Jump to the function's common return point.  */
  expand_null_return_1 ();
}
1500 
1501 /* Generate RTL to return directly from the current function.
1502    (That is, we bypass any return value.)  */
1503 
1504 void
expand_naked_return(void)1505 expand_naked_return (void)
1506 {
1507   rtx end_label;
1508 
1509   clear_pending_stack_adjust ();
1510   do_pending_stack_adjust ();
1511 
1512   end_label = naked_return_label;
1513   if (end_label == 0)
1514     end_label = naked_return_label = gen_label_rtx ();
1515 
1516   emit_jump (end_label);
1517 }
1518 
1519 /* Generate RTL to return from the current function, with value VAL.  */
1520 
static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location
     unless it's already there.  */

  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      /* If the target promotes function return values, convert VAL to
	 the (possibly wider) mode the return register was given.  */
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
      {
	int unsignedp = TYPE_UNSIGNED (type);
	enum machine_mode old_mode
	  = DECL_MODE (DECL_RESULT (current_function_decl));
	enum machine_mode mode
	  = promote_mode (type, old_mode, &unsignedp, 1);

	if (mode != old_mode)
	  val = convert_modes (mode, old_mode, val, unsignedp);
      }
      /* A PARALLEL return location means the value is scattered over
	 several hard registers.  */
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
1550 
1551 /* Output a return with no value.  */
1552 
static void
expand_null_return_1 (void)
{
  /* Flush any pending stack adjustment so the frame is consistent at
     the jump, then branch to the function's common return label.  */
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}
1560 
1561 /* Generate RTL to evaluate the expression RETVAL and return it
1562    from the current function.  */
1563 
void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* "return <result> = <rhs>": the value to compute is the RHS.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate degenerates to a plain return.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      /* Assemble the per-word pseudos into one register wide enough to
	 hold them all, then narrow to the actual return mode.  */
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      expand_value_return (result_rtl);
    }
}
1738 
1739 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
1740    in question represents the outermost pair of curly braces (i.e. the "body
1741    block") of a function or method.
1742 
1743    For any BLOCK node representing a "body block" of a function or method, the
1744    BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
1745    represents the outermost (function) scope for the function or method (i.e.
1746    the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
1747    *that* node in turn will point to the relevant FUNCTION_DECL node.  */
1748 
1749 int
is_body_block(tree stmt)1750 is_body_block (tree stmt)
1751 {
1752   if (lang_hooks.no_body_blocks)
1753     return 0;
1754 
1755   if (TREE_CODE (stmt) == BLOCK)
1756     {
1757       tree parent = BLOCK_SUPERCONTEXT (stmt);
1758 
1759       if (parent && TREE_CODE (parent) == BLOCK)
1760 	{
1761 	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
1762 
1763 	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
1764 	    return 1;
1765 	}
1766     }
1767 
1768   return 0;
1769 }
1770 
1771 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1772    handler.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments are to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
1840 
1841 /* Generate RTL for the automatic variable declaration DECL.
1842    (Other kinds of declarations are simply ignored if seen here.)  */
1843 
void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  Four cases follow:
     erroneous type, incomplete type, register candidate, fixed-size
     stack slot, and dynamically-sized stack allocation.  */

  if (type == error_mark_node)
    /* A dummy (mem (const_int 0)) keeps later DECL_RTL users from
       crashing after an error has already been reported.  */
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	{
	  mark_user_reg (DECL_RTL (decl));

	  /* Trust user variables which have a pointer type to really
	     be pointers.  Do not trust compiler generated temporaries
	     as our type system is totally busted as it relates to
	     pointer arithmetic which translates into lots of compiler
	     generated objects with pointer types, but which are not really
	     pointers.  */
	  if (POINTER_TYPE_P (type))
	    mark_reg_pointer (DECL_RTL (decl),
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
	}
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  /* Point the previously-created address register at the stack
	     slot just assigned.  */
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_normal (DECL_SIZE_UNIT (decl));
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);


      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}
1991 
1992 /* Emit code to save the current value of stack.  */
1993 rtx
expand_stack_save(void)1994 expand_stack_save (void)
1995 {
1996   rtx ret = NULL_RTX;
1997 
1998   do_pending_stack_adjust ();
1999   emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2000   return ret;
2001 }
2002 
2003 /* Emit code to restore the current value of stack.  */
2004 void
expand_stack_restore(tree var)2005 expand_stack_restore (tree var)
2006 {
2007   rtx sa = DECL_RTL (var);
2008 
2009   emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2010 }
2011 
2012 /* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
2013    DECL_ELTS is the list of elements that belong to DECL's type.
2014    In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
2015 
void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
			tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  /* Make RTL for the union itself; every element aliases this object.  */
  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
      rtx decl_rtl;

      /* If any of the elements are used, so is the entire union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      /* Reuse the union's RTL directly when the modes match; otherwise
	 derive a view of the right mode from it.  */
      if (mode == GET_MODE (x))
	decl_rtl = x;
      else if (MEM_P (x))
        /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
           instead create a new MEM rtx with the proper mode.  */
	decl_rtl = adjust_address_nv (x, mode, 0);
      else
	{
	  gcc_assert (REG_P (x));
	  decl_rtl = gen_lowpart_SUBREG (mode, x);
	}
      SET_DECL_RTL (decl_elt, decl_rtl);
    }
}
2071 
2072 /* Do the insertion of a case label into case_list.  The labels are
2073    fed to us in descending order from the sorted vector of case labels used
2074    in the tree part of the middle end.  So the list we construct is
2075    sorted in ascending order.  The bounds on the case range, LOW and HIGH,
2076    are converted to case's index type TYPE.  */
2077 
static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
	       tree label)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  Make sure to drop overflow flags:
     rebuilding the bounds with build_int_cst_wide yields clean
     INTEGER_CSTs even if the conversions above set TREE_OVERFLOW.  */
  r = ggc_alloc (sizeof (struct case_node));
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  /* Push onto the head: callers feed labels in descending order, so
     the constructed list comes out ascending.  */
  r->right = head;
  return r;
}
2142 
2143 /* Maximum number of case bit tests.  */
2144 #define MAX_CASE_BIT_TESTS  3
2145 
2146 /* By default, enable case bit tests on targets with ashlsi3.  */
2147 #ifndef CASE_USE_BIT_TESTS
2148 #define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
2149 			     != CODE_FOR_nothing)
2150 #endif
2151 
2152 
2153 /* A case_bit_test represents a set of case nodes that may be
2154    selected from using a bit-wise comparison.  HI and LO hold
2155    the integer to be tested against, LABEL contains the label
2156    to jump to upon success and BITS counts the number of case
2157    nodes handled by this test, typically the number of bits
2158    set in HI:LO.  */
2159 
struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High word of the constant tested against.  */
  HOST_WIDE_INT lo;	/* Low word of the constant tested against.  */
  rtx label;		/* Label to jump to upon a successful test.  */
  int bits;		/* Number of case nodes handled by this test.  */
};
2167 
2168 /* Determine whether "1 << x" is relatively cheap in word_mode.  */
2169 
2170 static
lshift_cheap_p(void)2171 bool lshift_cheap_p (void)
2172 {
2173   static bool init = false;
2174   static bool cheap = true;
2175 
2176   if (!init)
2177     {
2178       rtx reg = gen_rtx_REG (word_mode, 10000);
2179       int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2180       cheap = cost < COSTS_N_INSNS (3);
2181       init = true;
2182     }
2183 
2184   return cheap;
2185 }
2186 
2187 /* Comparison function for qsort to order bit tests by decreasing
2188    number of case nodes, i.e. the node with the most cases gets
2189    tested first.  */
2190 
2191 static int
case_bit_test_cmp(const void * p1,const void * p2)2192 case_bit_test_cmp (const void *p1, const void *p2)
2193 {
2194   const struct case_bit_test *d1 = p1;
2195   const struct case_bit_test *d2 = p2;
2196 
2197   if (d2->bits != d1->bits)
2198     return d2->bits - d1->bits;
2199 
2200   /* Stabilize the sort.  */
2201   return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2202 }
2203 
2204 /*  Expand a switch statement by a short sequence of bit-wise
2205     comparisons.  "switch(x)" is effectively converted into
2206     "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2207     integer constants.
2208 
2209     INDEX_EXPR is the value being switched on, which is of
2210     type INDEX_TYPE.  MINVAL is the lowest case value of in
2211     the case nodes, of INDEX_TYPE type, and RANGE is highest
2212     value minus MINVAL, also of type INDEX_TYPE.  NODES is
2213     the set of case nodes, and DEFAULT_LABEL is the label to
2214     branch to should none of the cases match.
2215 
2216     There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2217     node targets.  */
2218 
2219 static void
emit_case_bit_tests(tree index_type,tree index_expr,tree minval,tree range,case_node_ptr nodes,rtx default_label)2220 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2221 		     tree range, case_node_ptr nodes, rtx default_label)
2222 {
2223   struct case_bit_test test[MAX_CASE_BIT_TESTS];
2224   enum machine_mode mode;
2225   rtx expr, index, label;
2226   unsigned int i,j,lo,hi;
2227   struct case_node *n;
2228   unsigned int count;
2229 
2230   count = 0;
2231   for (n = nodes; n; n = n->right)
2232     {
2233       label = label_rtx (n->code_label);
2234       for (i = 0; i < count; i++)
2235 	if (label == test[i].label)
2236 	  break;
2237 
2238       if (i == count)
2239 	{
2240 	  gcc_assert (count < MAX_CASE_BIT_TESTS);
2241 	  test[i].hi = 0;
2242 	  test[i].lo = 0;
2243 	  test[i].label = label;
2244 	  test[i].bits = 1;
2245 	  count++;
2246 	}
2247       else
2248         test[i].bits++;
2249 
2250       lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2251 				      n->low, minval), 1);
2252       hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2253 				      n->high, minval), 1);
2254       for (j = lo; j <= hi; j++)
2255         if (j >= HOST_BITS_PER_WIDE_INT)
2256 	  test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2257 	else
2258 	  test[i].lo |= (HOST_WIDE_INT) 1 << j;
2259     }
2260 
2261   qsort (test, count, sizeof(*test), case_bit_test_cmp);
2262 
2263   index_expr = fold_build2 (MINUS_EXPR, index_type,
2264 			    fold_convert (index_type, index_expr),
2265 			    fold_convert (index_type, minval));
2266   index = expand_normal (index_expr);
2267   do_pending_stack_adjust ();
2268 
2269   mode = TYPE_MODE (index_type);
2270   expr = expand_normal (range);
2271   emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2272 			   default_label);
2273 
2274   index = convert_to_mode (word_mode, index, 0);
2275   index = expand_binop (word_mode, ashl_optab, const1_rtx,
2276 			index, NULL_RTX, 1, OPTAB_WIDEN);
2277 
2278   for (i = 0; i < count; i++)
2279     {
2280       expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2281       expr = expand_binop (word_mode, and_optab, index, expr,
2282 			   NULL_RTX, 1, OPTAB_WIDEN);
2283       emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2284 			       word_mode, 1, test[i].label);
2285     }
2286 
2287   emit_jump (default_label);
2288 }
2289 
2290 #ifndef HAVE_casesi
2291 #define HAVE_casesi 0
2292 #endif
2293 
2294 #ifndef HAVE_tablejump
2295 #define HAVE_tablejump 0
2296 #endif
2297 
2298 /* Terminate a case (Pascal/Ada) or switch (C) statement
2299    in which ORIG_INDEX is the expression to be tested.
2300    If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2301    type as given in the source before any compiler conversions.
2302    Generate the code to test it and jump to the right place.  */
2303 
void
expand_case (tree exp)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i, fail;
  rtx before_case, end, lab;

  tree vec = SWITCH_LABELS (exp);
  tree orig_type = TREE_TYPE (exp);
  tree index_expr = SWITCH_COND (exp);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl;

  /* The switch body is lowered in gimplify.c, we should never have
     switches with a non-NULL SWITCH_BODY here.  */
  gcc_assert (!SWITCH_BODY (exp));
  gcc_assert (SWITCH_LABELS (exp));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
	 expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case is at the end of TREE_VEC.  */
      elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);
      gcc_assert (!CASE_HIGH (elt));
      gcc_assert (!CASE_LOW (elt));
      default_label_decl = CASE_LABEL (elt);

      /* Walk the remaining labels backwards, building CASE_LIST in
	 ascending order (see add_case_node).  */
      for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; )
	{
	  tree low, high;
	  elt = TREE_VEC_ELT (vec, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && INT_CST_LT (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
				     CASE_LABEL (elt));
	}


      before_case = start = get_last_insn ();
      default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  Also count the
	 comparisons needed (COUNT) and distinct targets (UNIQ), which
	 drive the dispatch-strategy heuristics below.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  emit_jump (default_label);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Strategy 1: try implementing this switch statement by a short
	 sequence of bit-wise comparisons.  However, we let the
	 binary-tree case below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* Strategy 2: if range of values is much bigger than number of
	 values, make a sequence of conditional branches instead of a
	 dispatch.  If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table
	    = (TREE_CODE (orig_type) != ENUMERAL_TYPE
	       && estimate_case_costs (case_list));
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  emit_jump (default_label);
	}
      /* Strategy 3: emit a dispatch table via casesi or tablejump.  */
      else
	{
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      /* Move the dispatch code just generated to sit after START, with
	 notes squeezed out of the moved range first.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      fail = squeeze_notes (&before_case, &end);
      gcc_assert (!fail);
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
}
2596 
2597 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.  */
2598 
static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  /* Thin wrapper: delegate to the generic compare-and-branch expander
     with an EQ condition, passing UNSIGNEDP through so the operands
     are handled with the caller's signedness.  */
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label);
}
2606 
2607 /* Not all case values are encountered equally.  This function
2608    uses a heuristic to weight case labels, in cases where that
2609    looks like a reasonable thing to do.
2610 
2611    Right now, all we try to guess is text, and we establish the
2612    following weights:
2613 
2614 	chars above space:	16
2615 	digits:			16
2616 	default:		12
2617 	space, punct:		8
2618 	tab:			4
2619 	newline:		2
2620 	other "\" chars:	1
2621 	remaining chars:	0
2622 
2623    If we find any cases in the switch that are not either -1 or in the range
2624    of valid ASCII characters, or are control characters other than those
2625    commonly used with "\", don't treat this switch scanning text.
2626 
2627    Return 1 if these nodes are suitable for cost estimation, otherwise
2628    return 0.  */
2629 
static int
estimate_case_costs (case_node_ptr node)
{
  tree min_ascii = integer_minus_one_node;
  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (! cost_table_initialized)
    {
      cost_table_initialized = 1;

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    COST_TABLE (i) = 16;
	  else if (ISPUNCT (i))
	    COST_TABLE (i) = 8;
	  else if (ISCNTRL (i))
	    COST_TABLE (i) = -1;	/* Control chars disable the heuristic.  */
	}

      /* Overrides for the control/whitespace characters commonly used
	 with "\" escapes in text-scanning switches.  */
      COST_TABLE (' ') = 8;
      COST_TABLE ('\t') = 4;
      COST_TABLE ('\0') = 4;
      COST_TABLE ('\n') = 2;
      COST_TABLE ('\f') = 1;
      COST_TABLE ('\v') = 1;
      COST_TABLE ('\b') = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
	if (COST_TABLE (i) < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
2685 
2686 /* Take an ordered list of case nodes
2687    and transform them into a near optimal binary tree,
2688    on the assumption that any target code selection value is as
2689    likely as any other.
2690 
2691    The transformation is performed by splitting the ordered
2692    list into two equal sections plus a pivot.  The parts are
2693    then attached to the pivot as left and right branches.  Each
2694    branch is then transformed recursively.  */
2695 
static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost;
		 here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* *NPP is the pivot: detach it from the left part, hang the
	     left part off its `left' and keep the remainder on `right',
	     then balance both halves recursively.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
2802 
2803 /* Search the parent sections of the case node tree
2804    to see if a test for the lower bound of NODE would be redundant.
2805    INDEX_TYPE is the type of the index expression.
2806 
2807    The instructions to generate the case decision tree are
2808    output in the same order as nodes are processed so it is
2809    known that if a parent node checks the range of the current
2810    node minus one that the current node is bounded at its lower
2811    span.  Thus the test would be redundant.  */
2812 
2813 static int
node_has_low_bound(case_node_ptr node,tree index_type)2814 node_has_low_bound (case_node_ptr node, tree index_type)
2815 {
2816   tree low_minus_one;
2817   case_node_ptr pnode;
2818 
2819   /* If the lower bound of this node is the lowest value in the index type,
2820      we need not test it.  */
2821 
2822   if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2823     return 1;
2824 
2825   /* If this node has a left branch, the value at the left must be less
2826      than that at this node, so it cannot be bounded at the bottom and
2827      we need not bother testing any further.  */
2828 
2829   if (node->left)
2830     return 0;
2831 
2832   low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2833 			       node->low,
2834 			       build_int_cst (TREE_TYPE (node->low), 1));
2835 
2836   /* If the subtraction above overflowed, we can't verify anything.
2837      Otherwise, look for a parent that tests our value - 1.  */
2838 
2839   if (! tree_int_cst_lt (low_minus_one, node->low))
2840     return 0;
2841 
2842   for (pnode = node->parent; pnode; pnode = pnode->parent)
2843     if (tree_int_cst_equal (low_minus_one, pnode->high))
2844       return 1;
2845 
2846   return 0;
2847 }
2848 
2849 /* Search the parent sections of the case node tree
2850    to see if a test for the upper bound of NODE would be redundant.
2851    INDEX_TYPE is the type of the index expression.
2852 
2853    The instructions to generate the case decision tree are
2854    output in the same order as nodes are processed so it is
2855    known that if a parent node checks the range of the current
2856    node plus one that the current node is bounded at its upper
2857    span.  Thus the test would be redundant.  */
2858 
2859 static int
node_has_high_bound(case_node_ptr node,tree index_type)2860 node_has_high_bound (case_node_ptr node, tree index_type)
2861 {
2862   tree high_plus_one;
2863   case_node_ptr pnode;
2864 
2865   /* If there is no upper bound, obviously no test is needed.  */
2866 
2867   if (TYPE_MAX_VALUE (index_type) == NULL)
2868     return 1;
2869 
2870   /* If the upper bound of this node is the highest value in the type
2871      of the index expression, we need not test against it.  */
2872 
2873   if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2874     return 1;
2875 
2876   /* If this node has a right branch, the value at the right must be greater
2877      than that at this node, so it cannot be bounded at the top and
2878      we need not bother testing any further.  */
2879 
2880   if (node->right)
2881     return 0;
2882 
2883   high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2884 			       node->high,
2885 			       build_int_cst (TREE_TYPE (node->high), 1));
2886 
2887   /* If the addition above overflowed, we can't verify anything.
2888      Otherwise, look for a parent that tests our value + 1.  */
2889 
2890   if (! tree_int_cst_lt (node->high, high_plus_one))
2891     return 0;
2892 
2893   for (pnode = node->parent; pnode; pnode = pnode->parent)
2894     if (tree_int_cst_equal (high_plus_one, pnode->low))
2895       return 1;
2896 
2897   return 0;
2898 }
2899 
2900 /* Search the parent sections of the
2901    case node tree to see if both tests for the upper and lower
2902    bounds of NODE would be redundant.  */
2903 
2904 static int
node_is_bounded(case_node_ptr node,tree index_type)2905 node_is_bounded (case_node_ptr node, tree index_type)
2906 {
2907   return (node_has_low_bound (node, index_type)
2908 	  && node_has_high_bound (node, index_type));
2909 }
2910 
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.
   DEFAULT_LABEL is the label to jump to when no case matches.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.

   NOTE: the order of the emit_* calls below IS the emitted program;
   do not reorder them.  */

static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  /* MODE is the mode comparisons are emitted in; IMODE is the mode of
     the case constants, which get converted to MODE before comparing.  */
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      /* Right subtree needs no further tests: branch straight to
		 its code on index > high, else recurse into the left.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      /* Mirror image: left subtree is fully bounded.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Both children are single-valued leaves, so emit one
		 equality test for each; any value that matches neither
		 falls through to the code emitted after this node.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->right->low),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->left->low),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_normal (node->high),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      /* Lower bound is implied by an ancestor; only test above.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      /* Upper bound is implied by an ancestor; only test below.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  /* In-range values reach here; jump to this node's code.  */
	  emit_jump (label_rtx (node->code_label));
	}
    }
}
3304