1 /* A pass for lowering trees to RTL.
2    Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber.  */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "tree-inline.h"
65 #include "value-prof.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
68 #include "cfgloop.h"
69 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
70 #include "stringpool.h"
71 #include "attribs.h"
72 #include "asan.h"
73 #include "tree-ssa-address.h"
74 #include "output.h"
75 #include "builtins.h"
76 
77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79    give the same symbol without quotes for an alternative entry point.  You
80    must define both, or neither.  */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
84 
85 /* This variable holds information helping the rewriting of SSA trees
86    into RTL.  */
87 struct ssaexpand SA;
88 
89 /* This variable holds the currently expanded gimple statement for purposes
90    of communicating the profile info to the builtin expanders.  */
91 gimple *currently_expanding_gimple_stmt;
92 
93 static rtx expand_debug_expr (tree);
94 
95 static bool defer_stack_allocation (tree, bool);
96 
97 static void record_alignment_for_reg_var (unsigned int);
98 
99 /* Return an expression tree corresponding to the RHS of GIMPLE
100    statement STMT.  */
101 
102 tree
103 gimple_assign_rhs_to_tree (gimple *stmt)
104 {
105   tree t;
106   switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
107     {
108     case GIMPLE_TERNARY_RHS:
109       t = build3 (gimple_assign_rhs_code (stmt),
110 		  TREE_TYPE (gimple_assign_lhs (stmt)),
111 		  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
112 		  gimple_assign_rhs3 (stmt));
113       break;
114     case GIMPLE_BINARY_RHS:
115       t = build2 (gimple_assign_rhs_code (stmt),
116 		  TREE_TYPE (gimple_assign_lhs (stmt)),
117 		  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
118       break;
119     case GIMPLE_UNARY_RHS:
120       t = build1 (gimple_assign_rhs_code (stmt),
121 		  TREE_TYPE (gimple_assign_lhs (stmt)),
122 		  gimple_assign_rhs1 (stmt));
123       break;
124     case GIMPLE_SINGLE_RHS:
125       {
126 	t = gimple_assign_rhs1 (stmt);
127 	/* Avoid modifying this tree in place below.  */
128 	if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
129 	     && gimple_location (stmt) != EXPR_LOCATION (t))
130 	    || (gimple_block (stmt) && currently_expanding_to_rtl
131 		&& EXPR_P (t)))
132 	  t = copy_node (t);
133 	break;
134       }
135     default:
136       gcc_unreachable ();
137     }
138 
139   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
140     SET_EXPR_LOCATION (t, gimple_location (stmt));
141 
142   return t;
143 }
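
/* For illustration, a hedged example of the mapping above: a GIMPLE
   assignment such as "a_1 = b_2 + c_3" has a GIMPLE_BINARY_RHS, so the
   function returns build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3), with
   the statement's location copied onto the new tree when the expression
   can carry one.  (The SSA names here are hypothetical.)  */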
144 
145 
146 #ifndef STACK_ALIGNMENT_NEEDED
147 #define STACK_ALIGNMENT_NEEDED 1
148 #endif
149 
150 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
151 
152 /* Choose either CUR or NEXT as the leader DECL for a partition.
153    Prefer ignored decls, to simplify debug dumps and reduce ambiguity
154    arising from the same user variable being in multiple partitions (this is
155    less likely for compiler-introduced temps).  */
156 
157 static tree
158 leader_merge (tree cur, tree next)
159 {
160   if (cur == NULL || cur == next)
161     return next;
162 
163   if (DECL_P (cur) && DECL_IGNORED_P (cur))
164     return cur;
165 
166   if (DECL_P (next) && DECL_IGNORED_P (next))
167     return next;
168 
169   return cur;
170 }
171 
172 /* Associate declaration T with storage space X.  If T is not an
173    SSA name this is exactly SET_DECL_RTL; otherwise make the
174    partition of T associated with X.  */
175 static inline void
176 set_rtl (tree t, rtx x)
177 {
178   gcc_checking_assert (!x
179 		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
180 		       || (use_register_for_decl (t)
181 			   ? (REG_P (x)
182 			      || (GET_CODE (x) == CONCAT
183 				  && (REG_P (XEXP (x, 0))
184 				      || SUBREG_P (XEXP (x, 0)))
185 				  && (REG_P (XEXP (x, 1))
186 				      || SUBREG_P (XEXP (x, 1))))
187 			      /* We need to accept PARALLELs for RESULT_DECLs
188 				 because of vector types with BLKmode returned
189 				 in multiple registers, but they are supposed
190 				 to be uncoalesced.  */
191 			      || (GET_CODE (x) == PARALLEL
192 				  && SSAVAR (t)
193 				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
194 				  && (GET_MODE (x) == BLKmode
195 				      || !flag_tree_coalesce_vars)))
196 			   : (MEM_P (x) || x == pc_rtx
197 			      || (GET_CODE (x) == CONCAT
198 				  && MEM_P (XEXP (x, 0))
199 				  && MEM_P (XEXP (x, 1))))));
200   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
201      RESULT_DECLs has the expected mode.  For memory, we accept
202      unpromoted modes, since that's what we're likely to get.  For
203      PARM_DECLs and RESULT_DECLs, we'll have been called by
204      set_parm_rtl, which will give us the default def, so we don't
205      have to compute it ourselves.  For RESULT_DECLs, we accept mode
206      mismatches too, as long as we have BLKmode or are not coalescing
207      across variables, so that we don't reject BLKmode PARALLELs or
208      unpromoted REGs.  */
209   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
210 		       || (SSAVAR (t)
211 			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
212 			   && (promote_ssa_mode (t, NULL) == BLKmode
213 			       || !flag_tree_coalesce_vars))
214 		       || !use_register_for_decl (t)
215 		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
216 
217   if (x)
218     {
219       bool skip = false;
220       tree cur = NULL_TREE;
221       rtx xm = x;
222 
223     retry:
224       if (MEM_P (xm))
225 	cur = MEM_EXPR (xm);
226       else if (REG_P (xm))
227 	cur = REG_EXPR (xm);
228       else if (SUBREG_P (xm))
229 	{
230 	  gcc_assert (subreg_lowpart_p (xm));
231 	  xm = SUBREG_REG (xm);
232 	  goto retry;
233 	}
234       else if (GET_CODE (xm) == CONCAT)
235 	{
236 	  xm = XEXP (xm, 0);
237 	  goto retry;
238 	}
239       else if (GET_CODE (xm) == PARALLEL)
240 	{
241 	  xm = XVECEXP (xm, 0, 0);
242 	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
243 	  xm = XEXP (xm, 0);
244 	  goto retry;
245 	}
246       else if (xm == pc_rtx)
247 	skip = true;
248       else
249 	gcc_unreachable ();
250 
251       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
252 
253       if (cur != next)
254 	{
255 	  if (MEM_P (x))
256 	    set_mem_attributes (x,
257 				next && TREE_CODE (next) == SSA_NAME
258 				? TREE_TYPE (next)
259 				: next, true);
260 	  else
261 	    set_reg_attrs_for_decl_rtl (next, x);
262 	}
263     }
264 
265   if (TREE_CODE (t) == SSA_NAME)
266     {
267       int part = var_to_partition (SA.map, t);
268       if (part != NO_PARTITION)
269 	{
270 	  if (SA.partition_to_pseudo[part])
271 	    gcc_assert (SA.partition_to_pseudo[part] == x);
272 	  else if (x != pc_rtx)
273 	    SA.partition_to_pseudo[part] = x;
274 	}
275       /* For the benefit of debug information at -O0 (where
276          vartracking doesn't run) record the place also in the base
277          DECL.  For PARMs and RESULTs, do so only when setting the
278          default def.  */
279       if (x && x != pc_rtx && SSA_NAME_VAR (t)
280 	  && (VAR_P (SSA_NAME_VAR (t))
281 	      || SSA_NAME_IS_DEFAULT_DEF (t)))
282 	{
283 	  tree var = SSA_NAME_VAR (t);
284 	  /* If we don't yet have something recorded, just record it now.  */
285 	  if (!DECL_RTL_SET_P (var))
286 	    SET_DECL_RTL (var, x);
287 	  /* If we have it set already to "multiple places" don't
288 	     change this.  */
289 	  else if (DECL_RTL (var) == pc_rtx)
290 	    ;
291 	  /* If we have something recorded and it's not the same place
292 	     as we want to record now, we have multiple partitions for the
293 	     same base variable, with different places.  We can't just
294 	     randomly chose one, hence we have to say that we don't know.
295 	     This only happens with optimization, and there var-tracking
296 	     will figure out the right thing.  */
297 	  else if (DECL_RTL (var) != x)
298 	    SET_DECL_RTL (var, pc_rtx);
299 	}
300     }
301   else
302     SET_DECL_RTL (t, x);
303 }
304 
305 /* This structure holds data relevant to one variable that will be
306    placed in a stack slot.  */
307 class stack_var
308 {
309 public:
310   /* The variable.  */
311   tree decl;
312 
313   /* Initially, the size of the variable.  Later, the size of the partition,
314      if this variable becomes its partition's representative.  */
315   poly_uint64 size;
316 
317   /* The *byte* alignment required for this variable.  Or, as with the
318      size, the alignment of this partition.  */
319   unsigned int alignb;
320 
321   /* The partition representative.  */
322   size_t representative;
323 
324   /* The next stack variable in the partition, or EOC.  */
325   size_t next;
326 
327   /* The indices of conflicting stack variables.  */
328   bitmap conflicts;
329 };
330 
331 #define EOC  ((size_t)-1)
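
/* For illustration (hypothetical indices): a partition grouping
   stack_vars[3], stack_vars[7] and stack_vars[9] is threaded through the
   representative/next fields roughly as

     stack_vars[3].representative == 3;  stack_vars[3].next == 7;
     stack_vars[7].representative == 3;  stack_vars[7].next == 9;
     stack_vars[9].representative == 3;  stack_vars[9].next == EOC;

   with only the representative's size and alignb kept up to date for the
   whole partition (see union_stack_vars below).  */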
332 
333 /* We have an array of such objects while deciding allocation.  */
334 static class stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
338 
339 /* Conflict bitmaps go on this obstack.  This allows us to destroy
340    all of them in one big sweep.  */
341 static bitmap_obstack stack_var_bitmap_obstack;
342 
343 /* An array of indices into stack_vars, sorted with stack_var_cmp:
344    objects requiring "large" alignment come first, then sizes decrease.  */
345 static size_t *stack_vars_sorted;
346 
347 /* The phase of the stack frame.  This is the known misalignment of
348    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
349    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
350 static int frame_phase;
351 
352 /* Used during expand_used_vars to remember if we saw any decls for
353    which we'd like to enable stack smashing protection.  */
354 static bool has_protected_decls;
355 
356 /* Used during expand_used_vars.  Remember if we saw a character buffer
357    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
358 static bool has_short_buffer;
359 
360 /* Compute the byte alignment to use for DECL.  Ignore alignment
361    we can't satisfy with the expected alignment of the stack boundary.  */
362 
363 static unsigned int
364 align_local_variable (tree decl, bool really_expand)
365 {
366   unsigned int align;
367 
368   if (TREE_CODE (decl) == SSA_NAME)
369     align = TYPE_ALIGN (TREE_TYPE (decl));
370   else
371     {
372       align = LOCAL_DECL_ALIGNMENT (decl);
373       /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
374 	 That is done before IPA and could bump alignment based on host
375 	 backend even for offloaded code which wants different
376 	 LOCAL_DECL_ALIGNMENT.  */
377       if (really_expand)
378 	SET_DECL_ALIGN (decl, align);
379     }
380   return align / BITS_PER_UNIT;
381 }
382 
383 /* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
384    down otherwise.  Return the aligned BASE value.  */
385 
386 static inline unsigned HOST_WIDE_INT
387 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
388 {
389   return align_up ? (base + align - 1) & -align : base & -align;
390 }
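
/* For example, assuming ALIGN is a power of two as required by the mask
   trick above: align_base (13, 8, true) yields 16, the next multiple of
   8, while align_base (13, 8, false) yields 8, the previous one.  */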
391 
392 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
393    Return the frame offset.  */
394 
395 static poly_int64
396 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
397 {
398   poly_int64 offset, new_frame_offset;
399 
400   if (FRAME_GROWS_DOWNWARD)
401     {
402       new_frame_offset
403 	= aligned_lower_bound (frame_offset - frame_phase - size,
404 			       align) + frame_phase;
405       offset = new_frame_offset;
406     }
407   else
408     {
409       new_frame_offset
410 	= aligned_upper_bound (frame_offset - frame_phase,
411 			       align) + frame_phase;
412       offset = new_frame_offset;
413       new_frame_offset += size;
414     }
415   frame_offset = new_frame_offset;
416 
417   if (frame_offset_overflow (frame_offset, cfun->decl))
418     frame_offset = offset = 0;
419 
420   return offset;
421 }
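
/* A worked example, assuming frame_phase == 0: on a target where
   FRAME_GROWS_DOWNWARD, with frame_offset == -20 a request for 8 bytes
   at alignment 16 rounds -20 - 8 == -28 down to -32, so the variable is
   placed at offset -32 and frame_offset becomes -32.  On an upward
   growing frame with frame_offset == 20, the same request rounds 20 up
   to 32, returns 32 and advances frame_offset to 40.  */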
422 
423 /* Accumulate DECL into STACK_VARS.  */
424 
425 static void
426 add_stack_var (tree decl, bool really_expand)
427 {
428   class stack_var *v;
429 
430   if (stack_vars_num >= stack_vars_alloc)
431     {
432       if (stack_vars_alloc)
433 	stack_vars_alloc = stack_vars_alloc * 3 / 2;
434       else
435 	stack_vars_alloc = 32;
436       stack_vars
437 	= XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
438     }
439   if (!decl_to_stack_part)
440     decl_to_stack_part = new hash_map<tree, size_t>;
441 
442   v = &stack_vars[stack_vars_num];
443   decl_to_stack_part->put (decl, stack_vars_num);
444 
445   v->decl = decl;
446   tree size = TREE_CODE (decl) == SSA_NAME
447     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
448     : DECL_SIZE_UNIT (decl);
449   v->size = tree_to_poly_uint64 (size);
450   /* Ensure that all variables have size, so that &a != &b for any two
451      variables that are simultaneously live.  */
452   if (known_eq (v->size, 0U))
453     v->size = 1;
454   v->alignb = align_local_variable (decl, really_expand);
455   /* An alignment of zero can mightily confuse us later.  */
456   gcc_assert (v->alignb != 0);
457 
458   /* All variables are initially in their own partition.  */
459   v->representative = stack_vars_num;
460   v->next = EOC;
461 
462   /* All variables initially conflict with no other.  */
463   v->conflicts = NULL;
464 
465   /* Ensure that this decl doesn't get put onto the list twice.  */
466   set_rtl (decl, pc_rtx);
467 
468   stack_vars_num++;
469 }
470 
471 /* Make the decls associated with luids X and Y conflict.  */
472 
473 static void
474 add_stack_var_conflict (size_t x, size_t y)
475 {
476   class stack_var *a = &stack_vars[x];
477   class stack_var *b = &stack_vars[y];
478   if (x == y)
479     return;
480   if (!a->conflicts)
481     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
482   if (!b->conflicts)
483     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
484   bitmap_set_bit (a->conflicts, y);
485   bitmap_set_bit (b->conflicts, x);
486 }
487 
488 /* Check whether the decls associated with luids X and Y conflict.  */
489 
490 static bool
491 stack_var_conflict_p (size_t x, size_t y)
492 {
493   class stack_var *a = &stack_vars[x];
494   class stack_var *b = &stack_vars[y];
495   if (x == y)
496     return false;
497   /* Partitions containing an SSA name result from gimple registers
498      with things like unsupported modes.  They are top-level and
499      hence conflict with everything else.  */
500   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
501     return true;
502 
503   if (!a->conflicts || !b->conflicts)
504     return false;
505   return bitmap_bit_p (a->conflicts, y);
506 }
507 
508 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
509    enter its partition number into bitmap DATA.  */
510 
511 static bool
512 visit_op (gimple *, tree op, tree, void *data)
513 {
514   bitmap active = (bitmap)data;
515   op = get_base_address (op);
516   if (op
517       && DECL_P (op)
518       && DECL_RTL_IF_SET (op) == pc_rtx)
519     {
520       size_t *v = decl_to_stack_part->get (op);
521       if (v)
522 	bitmap_set_bit (active, *v);
523     }
524   return false;
525 }
526 
527 /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
528    record conflicts between it and all currently active other partitions
529    from bitmap DATA.  */
530 
531 static bool
532 visit_conflict (gimple *, tree op, tree, void *data)
533 {
534   bitmap active = (bitmap)data;
535   op = get_base_address (op);
536   if (op
537       && DECL_P (op)
538       && DECL_RTL_IF_SET (op) == pc_rtx)
539     {
540       size_t *v = decl_to_stack_part->get (op);
541       if (v && bitmap_set_bit (active, *v))
542 	{
543 	  size_t num = *v;
544 	  bitmap_iterator bi;
545 	  unsigned i;
546 	  gcc_assert (num < stack_vars_num);
547 	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
548 	    add_stack_var_conflict (num, i);
549 	}
550     }
551   return false;
552 }
553 
554 /* Helper routine for add_scope_conflicts, calculating the active partitions
555    at the end of BB, leaving the result in WORK.  We're called to generate
556    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
557    liveness.  */
558 
559 static void
560 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
561 {
562   edge e;
563   edge_iterator ei;
564   gimple_stmt_iterator gsi;
565   walk_stmt_load_store_addr_fn visit;
566 
567   bitmap_clear (work);
568   FOR_EACH_EDGE (e, ei, bb->preds)
569     bitmap_ior_into (work, (bitmap)e->src->aux);
570 
571   visit = visit_op;
572 
573   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
574     {
575       gimple *stmt = gsi_stmt (gsi);
576       walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
577     }
578   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
579     {
580       gimple *stmt = gsi_stmt (gsi);
581 
582       if (gimple_clobber_p (stmt))
583 	{
584 	  tree lhs = gimple_assign_lhs (stmt);
585 	  size_t *v;
586 	  /* Nested function lowering might introduce LHSs
587 	     that are COMPONENT_REFs.  */
588 	  if (!VAR_P (lhs))
589 	    continue;
590 	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
591 	      && (v = decl_to_stack_part->get (lhs)))
592 	    bitmap_clear_bit (work, *v);
593 	}
594       else if (!is_gimple_debug (stmt))
595 	{
596 	  if (for_conflict
597 	      && visit == visit_op)
598 	    {
599 	      /* If this is the first real instruction in this BB we need
600 	         to add conflicts for everything live at this point now.
601 		 Unlike classical liveness for named objects we can't
602 		 rely on seeing a def/use of the names we're interested in.
603 		 There might merely be indirect loads/stores.  We'd not add any
604 		 conflicts for such partitions.  */
605 	      bitmap_iterator bi;
606 	      unsigned i;
607 	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
608 		{
609 		  class stack_var *a = &stack_vars[i];
610 		  if (!a->conflicts)
611 		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
612 		  bitmap_ior_into (a->conflicts, work);
613 		}
614 	      visit = visit_conflict;
615 	    }
616 	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
617 	}
618     }
619 }
620 
621 /* Generate stack partition conflicts between all partitions that are
622    simultaneously live.  */
623 
624 static void
625 add_scope_conflicts (void)
626 {
627   basic_block bb;
628   bool changed;
629   bitmap work = BITMAP_ALLOC (NULL);
630   int *rpo;
631   int n_bbs;
632 
633   /* We approximate the live range of a stack variable by taking the first
634      mention of its name as starting point(s), and by the end-of-scope
635      death clobber added by gimplify as ending point(s) of the range.
636      This over-approximates when, for instance, an address-taken operation
637      was moved upward without the corresponding dereference being moved too.
638      But it's conservatively correct as a variable never can hold values
639      before its name is mentioned at least once.
640 
641      We then do a mostly classical bitmap liveness algorithm.  */
642 
643   FOR_ALL_BB_FN (bb, cfun)
644     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
645 
646   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
647   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
648 
649   changed = true;
650   while (changed)
651     {
652       int i;
653       changed = false;
654       for (i = 0; i < n_bbs; i++)
655 	{
656 	  bitmap active;
657 	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
658 	  active = (bitmap)bb->aux;
659 	  add_scope_conflicts_1 (bb, work, false);
660 	  if (bitmap_ior_into (active, work))
661 	    changed = true;
662 	}
663     }
664 
665   FOR_EACH_BB_FN (bb, cfun)
666     add_scope_conflicts_1 (bb, work, true);
667 
668   free (rpo);
669   BITMAP_FREE (work);
670   FOR_ALL_BB_FN (bb, cfun)
671     BITMAP_FREE (bb->aux);
672 }
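
/* For illustration, assuming the gimplifier added the usual end-of-scope
   clobbers for these addressable locals:

     { char buf1[64]; use (&buf1); }	// buf1 = {CLOBBER} at scope exit
     { char buf2[64]; use (&buf2); }	// buf2 = {CLOBBER} at scope exit

   The clobber of buf1 clears its bit from WORK before buf2 is first
   mentioned, so no conflict between the two is recorded and the
   partitioning code below is free to give both buffers the same stack
   location.  (use and the buffer names are hypothetical.)  */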
673 
674 /* A subroutine of partition_stack_vars.  A comparison function for qsort,
675    sorting an array of indices by the properties of the object.  */
676 
677 static int
678 stack_var_cmp (const void *a, const void *b)
679 {
680   size_t ia = *(const size_t *)a;
681   size_t ib = *(const size_t *)b;
682   unsigned int aligna = stack_vars[ia].alignb;
683   unsigned int alignb = stack_vars[ib].alignb;
684   poly_int64 sizea = stack_vars[ia].size;
685   poly_int64 sizeb = stack_vars[ib].size;
686   tree decla = stack_vars[ia].decl;
687   tree declb = stack_vars[ib].decl;
688   bool largea, largeb;
689   unsigned int uida, uidb;
690 
691   /* Primary compare on "large" alignment.  Large comes first.  */
692   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
693   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
694   if (largea != largeb)
695     return (int)largeb - (int)largea;
696 
697   /* Secondary compare on size, decreasing.  */
698   int diff = compare_sizes_for_sort (sizeb, sizea);
699   if (diff != 0)
700     return diff;
701 
702   /* Tertiary compare on true alignment, decreasing.  */
703   if (aligna < alignb)
704     return -1;
705   if (aligna > alignb)
706     return 1;
707 
708   /* Final compare on ID for sort stability, increasing.
709      Two SSA names are compared by their version, SSA names come before
710      non-SSA names, and two normal decls are compared by their DECL_UID.  */
711   if (TREE_CODE (decla) == SSA_NAME)
712     {
713       if (TREE_CODE (declb) == SSA_NAME)
714 	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
715       else
716 	return -1;
717     }
718   else if (TREE_CODE (declb) == SSA_NAME)
719     return 1;
720   else
721     uida = DECL_UID (decla), uidb = DECL_UID (declb);
722   if (uida < uidb)
723     return 1;
724   if (uida > uidb)
725     return -1;
726   return 0;
727 }
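
/* As a hedged illustration of the resulting order (hypothetical
   entries): an object needing "large" (say 1024-byte) alignment sorts
   before every "small"-aligned object regardless of size; among the
   small-aligned ones a 64-byte object precedes an 8-byte one (sizes
   decrease), and remaining ties are broken by alignment and finally by
   SSA version or DECL_UID so the sort is stable.  */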
728 
729 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
730 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
731 
732 /* If the points-to solution *PT points to variables that are in a partition
733    together with other variables add all partition members to the pointed-to
734    variables bitmap.  */
735 
736 static void
737 add_partitioned_vars_to_ptset (struct pt_solution *pt,
738 			       part_hashmap *decls_to_partitions,
739 			       hash_set<bitmap> *visited, bitmap temp)
740 {
741   bitmap_iterator bi;
742   unsigned i;
743   bitmap *part;
744 
745   if (pt->anything
746       || pt->vars == NULL
747       /* The pointed-to vars bitmap is shared; it is enough to
748 	 visit it once.  */
749       || visited->add (pt->vars))
750     return;
751 
752   bitmap_clear (temp);
753 
754   /* By using a temporary bitmap to store all members of the partitions
755      we have to add we make sure to visit each of the partitions only
756      once.  */
757   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
758     if ((!temp
759 	 || !bitmap_bit_p (temp, i))
760 	&& (part = decls_to_partitions->get (i)))
761       bitmap_ior_into (temp, *part);
762   if (!bitmap_empty_p (temp))
763     bitmap_ior_into (pt->vars, temp);
764 }
765 
766 /* Update points-to sets based on partition info, so we can use them on RTL.
767    The bitmaps representing stack partitions will be saved until expand,
768    where partitioned decls used as bases in memory expressions will be
769    rewritten.  */
770 
771 static void
772 update_alias_info_with_stack_vars (void)
773 {
774   part_hashmap *decls_to_partitions = NULL;
775   size_t i, j;
776   tree var = NULL_TREE;
777 
778   for (i = 0; i < stack_vars_num; i++)
779     {
780       bitmap part = NULL;
781       tree name;
782       struct ptr_info_def *pi;
783 
784       /* Not interested in partitions with a single variable.  */
785       if (stack_vars[i].representative != i
786           || stack_vars[i].next == EOC)
787         continue;
788 
789       if (!decls_to_partitions)
790 	{
791 	  decls_to_partitions = new part_hashmap;
792 	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
793 	}
794 
795       /* Create an SSA_NAME that points to the partition for use
796          as base during alias-oracle queries on RTL for bases that
797 	 have been partitioned.  */
798       if (var == NULL_TREE)
799 	var = create_tmp_var (ptr_type_node);
800       name = make_ssa_name (var);
801 
802       /* Create bitmaps representing partitions.  They will be used for
803          points-to sets later, so use GGC alloc.  */
804       part = BITMAP_GGC_ALLOC ();
805       for (j = i; j != EOC; j = stack_vars[j].next)
806 	{
807 	  tree decl = stack_vars[j].decl;
808 	  unsigned int uid = DECL_PT_UID (decl);
809 	  bitmap_set_bit (part, uid);
810 	  decls_to_partitions->put (uid, part);
811 	  cfun->gimple_df->decls_to_pointers->put (decl, name);
812 	  if (TREE_ADDRESSABLE (decl))
813 	    TREE_ADDRESSABLE (name) = 1;
814 	}
815 
816       /* Make the SSA name point to all partition members.  */
817       pi = get_ptr_info (name);
818       pt_solution_set (&pi->pt, part, false);
819     }
820 
821   /* Make all points-to sets that contain one member of a partition
822      contain all members of the partition.  */
823   if (decls_to_partitions)
824     {
825       unsigned i;
826       tree name;
827       hash_set<bitmap> visited;
828       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
829 
830       FOR_EACH_SSA_NAME (i, name, cfun)
831 	{
832 	  struct ptr_info_def *pi;
833 
834 	  if (POINTER_TYPE_P (TREE_TYPE (name))
835 	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
836 	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
837 					   &visited, temp);
838 	}
839 
840       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
841 				     decls_to_partitions, &visited, temp);
842 
843       delete decls_to_partitions;
844       BITMAP_FREE (temp);
845     }
846 }
847 
848 /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
849    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
850    Merge them into a single partition A.  */
851 
852 static void
853 union_stack_vars (size_t a, size_t b)
854 {
855   class stack_var *vb = &stack_vars[b];
856   bitmap_iterator bi;
857   unsigned u;
858 
859   gcc_assert (stack_vars[b].next == EOC);
860    /* Add B to A's partition.  */
861   stack_vars[b].next = stack_vars[a].next;
862   stack_vars[b].representative = a;
863   stack_vars[a].next = b;
864 
865   /* Make sure A is big enough to hold B.  */
866   stack_vars[a].size = upper_bound (stack_vars[a].size, stack_vars[b].size);
867 
868   /* Update the required alignment of partition A to account for B.  */
869   if (stack_vars[a].alignb < stack_vars[b].alignb)
870     stack_vars[a].alignb = stack_vars[b].alignb;
871 
872   /* Update the interference graph and merge the conflicts.  */
873   if (vb->conflicts)
874     {
875       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
876 	add_stack_var_conflict (a, stack_vars[u].representative);
877       BITMAP_FREE (vb->conflicts);
878     }
879 }
880 
881 /* A subroutine of expand_used_vars.  Binpack the variables into
882    partitions constrained by the interference graph.  The overall
883    algorithm used is as follows:
884 
885 	Sort the objects by size in descending order.
886 	For each object A {
887 	  S = size(A)
888 	  O = 0
889 	  loop {
890 	    Look for the largest non-conflicting object B with size <= S.
891 	    UNION (A, B)
892 	  }
893 	}
894 */
895 
896 static void
897 partition_stack_vars (void)
898 {
899   size_t si, sj, n = stack_vars_num;
900 
901   stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
902   for (si = 0; si < n; ++si)
903     stack_vars_sorted[si] = si;
904 
905   if (n == 1)
906     return;
907 
908   qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
909 
910   for (si = 0; si < n; ++si)
911     {
912       size_t i = stack_vars_sorted[si];
913       unsigned int ialign = stack_vars[i].alignb;
914       poly_int64 isize = stack_vars[i].size;
915 
916       /* Ignore objects that aren't partition representatives. If we
917          see a var that is not a partition representative, it must
918          have been merged earlier.  */
919       if (stack_vars[i].representative != i)
920         continue;
921 
922       for (sj = si + 1; sj < n; ++sj)
923 	{
924 	  size_t j = stack_vars_sorted[sj];
925 	  unsigned int jalign = stack_vars[j].alignb;
926 	  poly_int64 jsize = stack_vars[j].size;
927 
928 	  /* Ignore objects that aren't partition representatives.  */
929 	  if (stack_vars[j].representative != j)
930 	    continue;
931 
932 	  /* Do not mix objects of "small" (supported) alignment
933 	     and "large" (unsupported) alignment.  */
934 	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
935 	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
936 	    break;
937 
938 	  /* For Address Sanitizer do not mix objects with different
939 	     sizes, as the shorter vars wouldn't be adequately protected.
940 	     Don't do that for "large" (unsupported) alignment objects,
941 	     those aren't protected anyway.  */
942 	  if (asan_sanitize_stack_p ()
943 	      && maybe_ne (isize, jsize)
944 	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
945 	    break;
946 
947 	  /* Ignore conflicting objects.  */
948 	  if (stack_var_conflict_p (i, j))
949 	    continue;
950 
951 	  /* UNION the objects, placing J at OFFSET.  */
952 	  union_stack_vars (i, j);
953 	}
954     }
955 
956   update_alias_info_with_stack_vars ();
957 }
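
/* A sketch of the effect, assuming hypothetical sizes, no "large"
   alignment and no address sanitizer: given representatives of sizes 40,
   24, 16 and 8 where only the 40- and 24-byte objects conflict, the
   sorted scan lets the 40-byte object absorb the 16- and 8-byte ones,
   leaving two partitions of sizes 40 and 24 instead of four separate
   allocations totalling 88 bytes.  */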
958 
959 /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
960 
961 static void
962 dump_stack_var_partition (void)
963 {
964   size_t si, i, j, n = stack_vars_num;
965 
966   for (si = 0; si < n; ++si)
967     {
968       i = stack_vars_sorted[si];
969 
970       /* Skip variables that aren't partition representatives, for now.  */
971       if (stack_vars[i].representative != i)
972 	continue;
973 
974       fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
975       print_dec (stack_vars[i].size, dump_file);
976       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
977 
978       for (j = i; j != EOC; j = stack_vars[j].next)
979 	{
980 	  fputc ('\t', dump_file);
981 	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
982 	}
983       fputc ('\n', dump_file);
984     }
985 }
986 
987 /* Assign rtl to DECL at BASE + OFFSET.  */
988 
989 static void
990 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
991 			 poly_int64 offset)
992 {
993   unsigned align;
994   rtx x;
995 
996   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
997   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
998 
999   x = plus_constant (Pmode, base, offset);
1000   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
1001 		   ? TYPE_MODE (TREE_TYPE (decl))
1002 		   : DECL_MODE (SSAVAR (decl)), x);
1003 
1004   if (TREE_CODE (decl) != SSA_NAME)
1005     {
1006       /* Set the alignment we actually gave this decl if it isn't an SSA name.
1007          If it is, we generate stack slots only accidentally, so it isn't as
1008 	 important; we'll simply use the alignment that is already set.  */
1009       if (base == virtual_stack_vars_rtx)
1010 	offset -= frame_phase;
1011       align = known_alignment (offset);
1012       align *= BITS_PER_UNIT;
1013       if (align == 0 || align > base_align)
1014 	align = base_align;
1015 
1016       /* One would think that we could assert that we're not decreasing
1017 	 alignment here, but (at least) the i386 port does exactly this
1018 	 via the MINIMUM_ALIGNMENT hook.  */
1019 
1020       SET_DECL_ALIGN (decl, align);
1021       DECL_USER_ALIGN (decl) = 0;
1022     }
1023 
1024   set_rtl (decl, x);
1025 }
1026 
1027 class stack_vars_data
1028 {
1029 public:
1030   /* Vector of offset pairs, always end of some padding followed
1031      by start of the padding that needs Address Sanitizer protection.
1032      The vector is in reverse order; the highest offset pairs come first.  */
1033   auto_vec<HOST_WIDE_INT> asan_vec;
1034 
1035   /* Vector of partition representative decls in between the paddings.  */
1036   auto_vec<tree> asan_decl_vec;
1037 
1038   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
1039   rtx asan_base;
1040 
1041   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
1042   unsigned int asan_alignb;
1043 };
1044 
1045 /* A subroutine of expand_used_vars.  Give each partition representative
1046    a unique location within the stack frame.  Update each partition member
1047    with that location.  */
1048 
1049 static void
1050 expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
1051 {
1052   size_t si, i, j, n = stack_vars_num;
1053   poly_uint64 large_size = 0, large_alloc = 0;
1054   rtx large_base = NULL;
1055   unsigned large_align = 0;
1056   bool large_allocation_done = false;
1057   tree decl;
1058 
1059   /* Determine if there are any variables requiring "large" alignment.
1060      Since these are dynamically allocated, we only process these if
1061      no predicate is involved.  */
1062   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1063   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1064     {
1065       /* Find the total size of these variables.  */
1066       for (si = 0; si < n; ++si)
1067 	{
1068 	  unsigned alignb;
1069 
1070 	  i = stack_vars_sorted[si];
1071 	  alignb = stack_vars[i].alignb;
1072 
1073 	  /* All "large" alignment decls come before all "small" alignment
1074 	     decls, but "large" alignment decls are not sorted based on
1075 	     their alignment.  Increase large_align to track the largest
1076 	     required alignment.  */
1077 	  if ((alignb * BITS_PER_UNIT) > large_align)
1078 	    large_align = alignb * BITS_PER_UNIT;
1079 
1080 	  /* Stop when we get to the first decl with "small" alignment.  */
1081 	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1082 	    break;
1083 
1084 	  /* Skip variables that aren't partition representatives.  */
1085 	  if (stack_vars[i].representative != i)
1086 	    continue;
1087 
1088 	  /* Skip variables that have already had rtl assigned.  See also
1089 	     add_stack_var where we perpetrate this pc_rtx hack.  */
1090 	  decl = stack_vars[i].decl;
1091 	  if (TREE_CODE (decl) == SSA_NAME
1092 	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1093 	      : DECL_RTL (decl) != pc_rtx)
1094 	    continue;
1095 
1096 	  large_size = aligned_upper_bound (large_size, alignb);
1097 	  large_size += stack_vars[i].size;
1098 	}
1099     }
1100 
1101   for (si = 0; si < n; ++si)
1102     {
1103       rtx base;
1104       unsigned base_align, alignb;
1105       poly_int64 offset;
1106 
1107       i = stack_vars_sorted[si];
1108 
1109       /* Skip variables that aren't partition representatives, for now.  */
1110       if (stack_vars[i].representative != i)
1111 	continue;
1112 
1113       /* Skip variables that have already had rtl assigned.  See also
1114 	 add_stack_var where we perpetrate this pc_rtx hack.  */
1115       decl = stack_vars[i].decl;
1116       if (TREE_CODE (decl) == SSA_NAME
1117 	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1118 	  : DECL_RTL (decl) != pc_rtx)
1119 	continue;
1120 
1121       /* Check the predicate to see whether this variable should be
1122 	 allocated in this pass.  */
1123       if (pred && !pred (i))
1124 	continue;
1125 
1126       alignb = stack_vars[i].alignb;
1127       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1128 	{
1129 	  base = virtual_stack_vars_rtx;
1130 	  /* ASAN description strings don't yet have a syntax for expressing
1131 	     polynomial offsets.  */
1132 	  HOST_WIDE_INT prev_offset;
1133 	  if (asan_sanitize_stack_p ()
1134 	      && pred
1135 	      && frame_offset.is_constant (&prev_offset)
1136 	      && stack_vars[i].size.is_constant ())
1137 	    {
1138 	      if (data->asan_vec.is_empty ())
1139 		{
1140 		  alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1141 		  prev_offset = frame_offset.to_constant ();
1142 		}
1143 	      prev_offset = align_base (prev_offset,
1144 					ASAN_MIN_RED_ZONE_SIZE,
1145 					!FRAME_GROWS_DOWNWARD);
1146 	      tree repr_decl = NULL_TREE;
1147 	      unsigned HOST_WIDE_INT size
1148 		= asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1149 	      if (data->asan_vec.is_empty ())
1150 		size = MAX (size, ASAN_RED_ZONE_SIZE);
1151 
1152 	      unsigned HOST_WIDE_INT alignment = MAX (alignb,
1153 						      ASAN_MIN_RED_ZONE_SIZE);
1154 	      offset = alloc_stack_frame_space (size, alignment);
1155 
1156 	      data->asan_vec.safe_push (prev_offset);
1157 	      /* Allocating a constant amount of space from a constant
1158 		 starting offset must give a constant result.  */
1159 	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
1160 					.to_constant ());
1161 	      /* Find best representative of the partition.
1162 		 Prefer those with DECL_NAME, even better
1163 		 satisfying asan_protect_stack_decl predicate.  */
1164 	      for (j = i; j != EOC; j = stack_vars[j].next)
1165 		if (asan_protect_stack_decl (stack_vars[j].decl)
1166 		    && DECL_NAME (stack_vars[j].decl))
1167 		  {
1168 		    repr_decl = stack_vars[j].decl;
1169 		    break;
1170 		  }
1171 		else if (repr_decl == NULL_TREE
1172 			 && DECL_P (stack_vars[j].decl)
1173 			 && DECL_NAME (stack_vars[j].decl))
1174 		  repr_decl = stack_vars[j].decl;
1175 	      if (repr_decl == NULL_TREE)
1176 		repr_decl = stack_vars[i].decl;
1177 	      data->asan_decl_vec.safe_push (repr_decl);
1178 
1179 	      /* Make sure a representative is unpoisoned if another
1180 		 variable in the partition is handled by
1181 		 use-after-scope sanitization.  */
1182 	      if (asan_handled_variables != NULL
1183 		  && !asan_handled_variables->contains (repr_decl))
1184 		{
1185 		  for (j = i; j != EOC; j = stack_vars[j].next)
1186 		    if (asan_handled_variables->contains (stack_vars[j].decl))
1187 		      break;
1188 		  if (j != EOC)
1189 		    asan_handled_variables->add (repr_decl);
1190 		}
1191 
1192 	      data->asan_alignb = MAX (data->asan_alignb, alignb);
1193 	      if (data->asan_base == NULL)
1194 		data->asan_base = gen_reg_rtx (Pmode);
1195 	      base = data->asan_base;
1196 
1197 	      if (!STRICT_ALIGNMENT)
1198 		base_align = crtl->max_used_stack_slot_alignment;
1199 	      else
1200 		base_align = MAX (crtl->max_used_stack_slot_alignment,
1201 				  GET_MODE_ALIGNMENT (SImode)
1202 				  << ASAN_SHADOW_SHIFT);
1203 	    }
1204 	  else
1205 	    {
1206 	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1207 	      base_align = crtl->max_used_stack_slot_alignment;
1208 	    }
1209 	}
1210       else
1211 	{
1212 	  /* Large alignment is only processed in the last pass.  */
1213 	  if (pred)
1214 	    continue;
1215 
1216 	  /* If there were any variables requiring "large" alignment, allocate
1217 	     space.  */
1218 	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1219 	    {
1220 	      poly_int64 loffset;
1221 	      rtx large_allocsize;
1222 
1223 	      large_allocsize = gen_int_mode (large_size, Pmode);
1224 	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1225 	      loffset = alloc_stack_frame_space
1226 		(rtx_to_poly_int64 (large_allocsize),
1227 		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1228 	      large_base = get_dynamic_stack_base (loffset, large_align);
1229 	      large_allocation_done = true;
1230 	    }
1231 	  gcc_assert (large_base != NULL);
1232 
1233 	  large_alloc = aligned_upper_bound (large_alloc, alignb);
1234 	  offset = large_alloc;
1235 	  large_alloc += stack_vars[i].size;
1236 
1237 	  base = large_base;
1238 	  base_align = large_align;
1239 	}
1240 
1241       /* Create rtl for each variable based on their location within the
1242 	 partition.  */
1243       for (j = i; j != EOC; j = stack_vars[j].next)
1244 	{
1245 	  expand_one_stack_var_at (stack_vars[j].decl,
1246 				   base, base_align,
1247 				   offset);
1248 	}
1249     }
1250 
1251   gcc_assert (known_eq (large_alloc, large_size));
1252 }
1253 
1254 /* Take into account all sizes of partitions and reset DECL_RTLs.  */
1255 static poly_uint64
1256 account_stack_vars (void)
1257 {
1258   size_t si, j, i, n = stack_vars_num;
1259   poly_uint64 size = 0;
1260 
1261   for (si = 0; si < n; ++si)
1262     {
1263       i = stack_vars_sorted[si];
1264 
1265       /* Skip variables that aren't partition representatives, for now.  */
1266       if (stack_vars[i].representative != i)
1267 	continue;
1268 
1269       size += stack_vars[i].size;
1270       for (j = i; j != EOC; j = stack_vars[j].next)
1271 	set_rtl (stack_vars[j].decl, NULL);
1272     }
1273   return size;
1274 }
1275 
1276 /* Record the RTL assignment X for the default def of PARM.  */
1277 
1278 extern void
1279 set_parm_rtl (tree parm, rtx x)
1280 {
1281   gcc_assert (TREE_CODE (parm) == PARM_DECL
1282 	      || TREE_CODE (parm) == RESULT_DECL);
1283 
1284   if (x && !MEM_P (x))
1285     {
1286       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1287 					      TYPE_MODE (TREE_TYPE (parm)),
1288 					      TYPE_ALIGN (TREE_TYPE (parm)));
1289 
1290       /* If the variable alignment is very large we'll dynamically
1291 	 allocate it, which means that in-frame portion is just a
1292 	 pointer.  ??? We've got a pseudo for sure here, do we
1293 	 actually dynamically allocate its spilling area if needed?
1294 	 ??? Isn't it a problem when Pmode alignment also exceeds
1295 	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
1296       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1297 	align = GET_MODE_ALIGNMENT (Pmode);
1298 
1299       record_alignment_for_reg_var (align);
1300     }
1301 
1302   tree ssa = ssa_default_def (cfun, parm);
1303   if (!ssa)
1304     return set_rtl (parm, x);
1305 
1306   int part = var_to_partition (SA.map, ssa);
1307   gcc_assert (part != NO_PARTITION);
1308 
1309   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1310   gcc_assert (changed);
1311 
1312   set_rtl (ssa, x);
1313   gcc_assert (DECL_RTL (parm) == x);
1314 }
1315 
1316 /* A subroutine of expand_one_var.  Called to immediately assign rtl
1317    to a variable to be allocated in the stack frame.  */
1318 
1319 static void
1320 expand_one_stack_var_1 (tree var)
1321 {
1322   poly_uint64 size;
1323   poly_int64 offset;
1324   unsigned byte_align;
1325 
1326   if (TREE_CODE (var) == SSA_NAME)
1327     {
1328       tree type = TREE_TYPE (var);
1329       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1330       byte_align = TYPE_ALIGN_UNIT (type);
1331     }
1332   else
1333     {
1334       size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1335       byte_align = align_local_variable (var, true);
1336     }
1337 
1338   /* We handle highly aligned variables in expand_stack_vars.  */
1339   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1340 
1341   offset = alloc_stack_frame_space (size, byte_align);
1342 
1343   expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1344 			   crtl->max_used_stack_slot_alignment, offset);
1345 }
1346 
1347 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1348    already assigned some MEM.  */
1349 
1350 static void
1351 expand_one_stack_var (tree var)
1352 {
1353   if (TREE_CODE (var) == SSA_NAME)
1354     {
1355       int part = var_to_partition (SA.map, var);
1356       if (part != NO_PARTITION)
1357 	{
1358 	  rtx x = SA.partition_to_pseudo[part];
1359 	  gcc_assert (x);
1360 	  gcc_assert (MEM_P (x));
1361 	  return;
1362 	}
1363     }
1364 
1365   return expand_one_stack_var_1 (var);
1366 }
1367 
1368 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1369    that will reside in a hard register.  */
1370 
1371 static void
1372 expand_one_hard_reg_var (tree var)
1373 {
1374   rest_of_decl_compilation (var, 0, 0);
1375 }
1376 
1377 /* Record the alignment requirements of some variable assigned to a
1378    pseudo.  */
1379 
1380 static void
1381 record_alignment_for_reg_var (unsigned int align)
1382 {
1383   if (SUPPORTS_STACK_ALIGNMENT
1384       && crtl->stack_alignment_estimated < align)
1385     {
1386       /* stack_alignment_estimated shouldn't change after the stack
1387          realign decision is made.  */
1388       gcc_assert (!crtl->stack_realign_processed);
1389       crtl->stack_alignment_estimated = align;
1390     }
1391 
1392   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1393      So here we only make sure stack_alignment_needed >= align.  */
1394   if (crtl->stack_alignment_needed < align)
1395     crtl->stack_alignment_needed = align;
1396   if (crtl->max_used_stack_slot_alignment < align)
1397     crtl->max_used_stack_slot_alignment = align;
1398 }
1399 
1400 /* Create RTL for an SSA partition.  */
1401 
1402 static void
1403 expand_one_ssa_partition (tree var)
1404 {
1405   int part = var_to_partition (SA.map, var);
1406   gcc_assert (part != NO_PARTITION);
1407 
1408   if (SA.partition_to_pseudo[part])
1409     return;
1410 
1411   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1412 					  TYPE_MODE (TREE_TYPE (var)),
1413 					  TYPE_ALIGN (TREE_TYPE (var)));
1414 
1415   /* If the variable alignment is very large we'll dynamically allocate
1416      it, which means that in-frame portion is just a pointer.  */
1417   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1418     align = GET_MODE_ALIGNMENT (Pmode);
1419 
1420   record_alignment_for_reg_var (align);
1421 
1422   if (!use_register_for_decl (var))
1423     {
1424       if (defer_stack_allocation (var, true))
1425 	add_stack_var (var, true);
1426       else
1427 	expand_one_stack_var_1 (var);
1428       return;
1429     }
1430 
1431   machine_mode reg_mode = promote_ssa_mode (var, NULL);
1432   rtx x = gen_reg_rtx (reg_mode);
1433 
1434   set_rtl (var, x);
1435 
1436   /* For a promoted variable, X will not be used directly but wrapped in a
1437      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1438      will assume that its upper bits can be inferred from its lower bits.
1439      Therefore, if X isn't initialized on every path from the entry, then
1440      we must do it manually in order to fulfill the above assumption.  */
1441   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1442       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1443     emit_move_insn (x, CONST0_RTX (reg_mode));
1444 }
1445 
1446 /* Record the association between the RTL generated for partition PART
1447    and the underlying variable of the SSA_NAME VAR.  */
1448 
1449 static void
1450 adjust_one_expanded_partition_var (tree var)
1451 {
1452   if (!var)
1453     return;
1454 
1455   tree decl = SSA_NAME_VAR (var);
1456 
1457   int part = var_to_partition (SA.map, var);
1458   if (part == NO_PARTITION)
1459     return;
1460 
1461   rtx x = SA.partition_to_pseudo[part];
1462 
1463   gcc_assert (x);
1464 
1465   set_rtl (var, x);
1466 
1467   if (!REG_P (x))
1468     return;
1469 
1470   /* Note if the object is a user variable.  */
1471   if (decl && !DECL_ARTIFICIAL (decl))
1472     mark_user_reg (x);
1473 
1474   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1475     mark_reg_pointer (x, get_pointer_alignment (var));
1476 }
1477 
1478 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1479    that will reside in a pseudo register.  */
1480 
1481 static void
1482 expand_one_register_var (tree var)
1483 {
1484   if (TREE_CODE (var) == SSA_NAME)
1485     {
1486       int part = var_to_partition (SA.map, var);
1487       if (part != NO_PARTITION)
1488 	{
1489 	  rtx x = SA.partition_to_pseudo[part];
1490 	  gcc_assert (x);
1491 	  gcc_assert (REG_P (x));
1492 	  return;
1493 	}
1494       gcc_unreachable ();
1495     }
1496 
1497   tree decl = var;
1498   tree type = TREE_TYPE (decl);
1499   machine_mode reg_mode = promote_decl_mode (decl, NULL);
1500   rtx x = gen_reg_rtx (reg_mode);
1501 
1502   set_rtl (var, x);
1503 
1504   /* Note if the object is a user variable.  */
1505   if (!DECL_ARTIFICIAL (decl))
1506     mark_user_reg (x);
1507 
1508   if (POINTER_TYPE_P (type))
1509     mark_reg_pointer (x, get_pointer_alignment (var));
1510 }
1511 
1512 /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1513    has some associated error, e.g. its type is error-mark.  We just need
1514    to pick something that won't crash the rest of the compiler.  */
1515 
1516 static void
1517 expand_one_error_var (tree var)
1518 {
1519   machine_mode mode = DECL_MODE (var);
1520   rtx x;
1521 
1522   if (mode == BLKmode)
1523     x = gen_rtx_MEM (BLKmode, const0_rtx);
1524   else if (mode == VOIDmode)
1525     x = const0_rtx;
1526   else
1527     x = gen_reg_rtx (mode);
1528 
1529   SET_DECL_RTL (var, x);
1530 }
1531 
1532 /* A subroutine of expand_one_var.  VAR is a variable that will be
1533    allocated to the local stack frame.  Return true if we wish to
1534    add VAR to STACK_VARS so that it will be coalesced with other
1535    variables.  Return false to allocate VAR immediately.
1536 
1537    This function is used to reduce the number of variables considered
1538    for coalescing, which reduces the size of the quadratic problem.  */
1539 
1540 static bool
1541 defer_stack_allocation (tree var, bool toplevel)
1542 {
1543   tree size_unit = TREE_CODE (var) == SSA_NAME
1544     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1545     : DECL_SIZE_UNIT (var);
1546   poly_uint64 size;
1547 
1548   /* Whether the variable is small enough for immediate allocation not to be
1549      a problem with regard to the frame size.  */
1550   bool smallish
1551     = (poly_int_tree_p (size_unit, &size)
1552        && (estimated_poly_value (size)
1553 	   < param_min_size_for_stack_sharing));
1554 
1555   /* If stack protection is enabled, *all* stack variables must be deferred,
1556      so that we can re-order the strings to the top of the frame.
1557      Similarly for Address Sanitizer.  */
1558   if (flag_stack_protect || asan_sanitize_stack_p ())
1559     return true;
1560 
1561   unsigned int align = TREE_CODE (var) == SSA_NAME
1562     ? TYPE_ALIGN (TREE_TYPE (var))
1563     : DECL_ALIGN (var);
1564 
1565   /* We handle "large" alignment via dynamic allocation.  We want to handle
1566      this extra complication in only one place, so defer them.  */
1567   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1568     return true;
1569 
1570   bool ignored = TREE_CODE (var) == SSA_NAME
1571     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1572     : DECL_IGNORED_P (var);
1573 
1574   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1575      might be detached from their block and appear at toplevel when we reach
1576      here.  We want to coalesce them with variables from other blocks when
1577      the immediate contribution to the frame size would be noticeable.  */
1578   if (toplevel && optimize > 0 && ignored && !smallish)
1579     return true;
1580 
1581   /* Variables declared in the outermost scope automatically conflict
1582      with every other variable.  The only reason to want to defer them
1583      at all is that, after sorting, we can more efficiently pack
1584      small variables in the stack frame.  Continue to defer at -O2.  */
1585   if (toplevel && optimize < 2)
1586     return false;
1587 
1588   /* Without optimization, *most* variables are allocated from the
1589      stack, which makes the quadratic problem large exactly when we
1590      want compilation to proceed as quickly as possible.  On the
1591      other hand, we don't want the function's stack frame size to
1592      get completely out of hand.  So we avoid adding scalars and
1593      "small" aggregates to the list at all.  */
1594   if (optimize == 0 && smallish)
1595     return false;
1596 
1597   return true;
1598 }
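
/* For instance, assuming default parameters: with -fstack-protector or
   -fsanitize=address every variable is deferred so the frame can be
   reordered; at -O0 a block-scope scalar below the
   min-size-for-stack-sharing threshold is allocated immediately, while a
   larger buffer is still deferred so it can share a slot with buffers
   from disjoint scopes.  */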
1599 
1600 /* A subroutine of expand_used_vars.  Expand one variable according to
1601    its flavor.  Variables to be placed on the stack are not actually
1602    expanded yet, merely recorded.
1603    When REALLY_EXPAND is false, only add stack values to be allocated.
1604    Return the stack usage this variable is supposed to take.
1605 */
1606 
1607 static poly_uint64
1608 expand_one_var (tree var, bool toplevel, bool really_expand)
1609 {
1610   unsigned int align = BITS_PER_UNIT;
1611   tree origvar = var;
1612 
1613   var = SSAVAR (var);
1614 
1615   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1616     {
1617       if (is_global_var (var))
1618 	return 0;
1619 
1620       /* Because we don't know if VAR will be in register or on stack,
1621 	 we conservatively assume it will be on stack even if VAR is
1622 	 eventually put into register after RA pass.  For non-automatic
1623 	 variables, which won't be on stack, we collect alignment of
1624 	 type and ignore user specified alignment.  Similarly for
1625 	 SSA_NAMEs for which use_register_for_decl returns true.  */
1626       if (TREE_STATIC (var)
1627 	  || DECL_EXTERNAL (var)
1628 	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1629 	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1630 				   TYPE_MODE (TREE_TYPE (var)),
1631 				   TYPE_ALIGN (TREE_TYPE (var)));
1632       else if (DECL_HAS_VALUE_EXPR_P (var)
1633 	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1634 	/* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
1635 	   or variables which were already assigned a stack slot by
1636 	   expand_one_stack_var_at - in the latter case DECL_ALIGN has already
1637 	   been adjusted to match the offset chosen for it.  */
1638 	align = crtl->stack_alignment_estimated;
1639       else
1640 	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1641 
1642       /* If the variable alignment is very large we'll dynamically allocate
1643 	 it, which means that the in-frame portion is just a pointer.  */
1644       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1645 	align = GET_MODE_ALIGNMENT (Pmode);
1646     }
1647 
1648   record_alignment_for_reg_var (align);
1649 
1650   poly_uint64 size;
1651   if (TREE_CODE (origvar) == SSA_NAME)
1652     {
1653       gcc_assert (!VAR_P (var)
1654 		  || (!DECL_EXTERNAL (var)
1655 		      && !DECL_HAS_VALUE_EXPR_P (var)
1656 		      && !TREE_STATIC (var)
1657 		      && TREE_TYPE (var) != error_mark_node
1658 		      && !DECL_HARD_REGISTER (var)
1659 		      && really_expand));
1660     }
1661   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1662     ;
1663   else if (DECL_EXTERNAL (var))
1664     ;
1665   else if (DECL_HAS_VALUE_EXPR_P (var))
1666     ;
1667   else if (TREE_STATIC (var))
1668     ;
1669   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1670     ;
1671   else if (TREE_TYPE (var) == error_mark_node)
1672     {
1673       if (really_expand)
1674         expand_one_error_var (var);
1675     }
1676   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1677     {
1678       if (really_expand)
1679 	{
1680 	  expand_one_hard_reg_var (var);
1681 	  if (!DECL_HARD_REGISTER (var))
1682 	    /* Invalid register specification.  */
1683 	    expand_one_error_var (var);
1684 	}
1685     }
1686   else if (use_register_for_decl (var))
1687     {
1688       if (really_expand)
1689         expand_one_register_var (origvar);
1690     }
1691   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1692 	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1693     {
1694       /* Reject variables which cover more than half of the address-space.  */
1695       if (really_expand)
1696 	{
1697 	  if (DECL_NONLOCAL_FRAME (var))
1698 	    error_at (DECL_SOURCE_LOCATION (current_function_decl),
1699 		      "total size of local objects is too large");
1700 	  else
1701 	    error_at (DECL_SOURCE_LOCATION (var),
1702 		      "size of variable %q+D is too large", var);
1703 	  expand_one_error_var (var);
1704 	}
1705     }
1706   else if (defer_stack_allocation (var, toplevel))
1707     add_stack_var (origvar, really_expand);
1708   else
1709     {
1710       if (really_expand)
1711         {
1712           if (lookup_attribute ("naked",
1713                                 DECL_ATTRIBUTES (current_function_decl)))
1714 	    error ("cannot allocate stack for variable %q+D, naked function",
1715                    var);
1716 
1717           expand_one_stack_var (origvar);
1718         }
1719       return size;
1720     }
1721   return 0;
1722 }
1723 
1724 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1725    expanding variables.  Those variables that can be put into registers
1726    are allocated pseudos; those that can't are put on the stack.
1727 
1728    TOPLEVEL is true if this is the outermost BLOCK.  */
1729 
1730 static void
1731 expand_used_vars_for_block (tree block, bool toplevel)
1732 {
1733   tree t;
1734 
1735   /* Expand all variables at this level.  */
1736   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1737     if (TREE_USED (t)
1738         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1739 	    || !DECL_NONSHAREABLE (t)))
1740       expand_one_var (t, toplevel, true);
1741 
1742   /* Recurse into the contained (nested) levels and expand their variables.  */
1743   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1744     expand_used_vars_for_block (t, false);
1745 }
1746 
1747 /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1748    and clear TREE_USED on all local variables.  */
1749 
1750 static void
1751 clear_tree_used (tree block)
1752 {
1753   tree t;
1754 
1755   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1756     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1757     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1758 	|| !DECL_NONSHAREABLE (t))
1759       TREE_USED (t) = 0;
1760 
1761   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1762     clear_tree_used (t);
1763 }
1764 
1765 enum {
1766   SPCT_FLAG_DEFAULT = 1,
1767   SPCT_FLAG_ALL = 2,
1768   SPCT_FLAG_STRONG = 3,
1769   SPCT_FLAG_EXPLICIT = 4
1770 };
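
/* Note: these values appear to correspond to -fstack-protector,
   -fstack-protector-all, -fstack-protector-strong and
   -fstack-protector-explicit, respectively (an assumption based on how
   flag_stack_protect is compared against them below).  */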
1771 
1772 /* Examine TYPE and determine a bit mask of the following features.  */
1773 
1774 #define SPCT_HAS_LARGE_CHAR_ARRAY	1
1775 #define SPCT_HAS_SMALL_CHAR_ARRAY	2
1776 #define SPCT_HAS_ARRAY			4
1777 #define SPCT_HAS_AGGREGATE		8
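
/* For instance (an illustrative assumption, using a --param ssp-buffer-size
   of 8, believed to be the default): a local "char buf[64]" classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, while "char tag[4]" classifies
   as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY.  */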
1778 
1779 static unsigned int
1780 stack_protect_classify_type (tree type)
1781 {
1782   unsigned int ret = 0;
1783   tree t;
1784 
1785   switch (TREE_CODE (type))
1786     {
1787     case ARRAY_TYPE:
1788       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1789       if (t == char_type_node
1790 	  || t == signed_char_type_node
1791 	  || t == unsigned_char_type_node)
1792 	{
1793 	  unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
1794 	  unsigned HOST_WIDE_INT len;
1795 
1796 	  if (!TYPE_SIZE_UNIT (type)
1797 	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1798 	    len = max;
1799 	  else
1800 	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1801 
1802 	  if (len < max)
1803 	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1804 	  else
1805 	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1806 	}
1807       else
1808 	ret = SPCT_HAS_ARRAY;
1809       break;
1810 
1811     case UNION_TYPE:
1812     case QUAL_UNION_TYPE:
1813     case RECORD_TYPE:
1814       ret = SPCT_HAS_AGGREGATE;
1815       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1816 	if (TREE_CODE (t) == FIELD_DECL)
1817 	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1818       break;
1819 
1820     default:
1821       break;
1822     }
1823 
1824   return ret;
1825 }
1826 
1827 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1828    part of the local stack frame.  Remember if we ever return nonzero for
1829    any variable in this function.  The return value is the phase number in
1830    which the variable should be allocated.  */
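/* (Phase 1 appears to hold character arrays only, phase 2 other kinds of
   arrays, and, when asan is active, phase 3 any partitions that still need
   protection; see the stack_protect_decl_phase_1/_2 and asan_decl_phase_3
   callbacks below and their uses in expand_used_vars.)  */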
1831 
1832 static int
1833 stack_protect_decl_phase (tree decl)
1834 {
1835   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1836   int ret = 0;
1837 
1838   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1839     has_short_buffer = true;
1840 
1841   if (flag_stack_protect == SPCT_FLAG_ALL
1842       || flag_stack_protect == SPCT_FLAG_STRONG
1843       || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1844 	  && lookup_attribute ("stack_protect",
1845 			       DECL_ATTRIBUTES (current_function_decl))))
1846     {
1847       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1848 	  && !(bits & SPCT_HAS_AGGREGATE))
1849 	ret = 1;
1850       else if (bits & SPCT_HAS_ARRAY)
1851 	ret = 2;
1852     }
1853   else
1854     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1855 
1856   if (ret)
1857     has_protected_decls = true;
1858 
1859   return ret;
1860 }
1861 
1862 /* Two helper routines that check for phase 1 and phase 2.  These are used
1863    as callbacks for expand_stack_vars.  */
1864 
1865 static bool
1866 stack_protect_decl_phase_1 (size_t i)
1867 {
1868   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1869 }
1870 
1871 static bool
1872 stack_protect_decl_phase_2 (size_t i)
1873 {
1874   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1875 }
1876 
1877 /* A helper function that checks for the asan phase (with the stack
1878    protector it is phase 3).  This is used as a callback for expand_stack_vars.
1879    Returns true if any of the vars in the partition need to be protected.  */
1880 
1881 static bool
1882 asan_decl_phase_3 (size_t i)
1883 {
1884   while (i != EOC)
1885     {
1886       if (asan_protect_stack_decl (stack_vars[i].decl))
1887 	return true;
1888       i = stack_vars[i].next;
1889     }
1890   return false;
1891 }
1892 
1893 /* Ensure that variables in different stack protection phases conflict
1894    so that they are not merged and share the same stack slot.
1895    Return true if there are any address taken variables.  */
1896 
1897 static bool
1898 add_stack_protection_conflicts (void)
1899 {
1900   size_t i, j, n = stack_vars_num;
1901   unsigned char *phase;
1902   bool ret = false;
1903 
1904   phase = XNEWVEC (unsigned char, n);
1905   for (i = 0; i < n; ++i)
1906     {
1907       phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1908       if (TREE_ADDRESSABLE (stack_vars[i].decl))
1909 	ret = true;
1910     }
1911 
1912   for (i = 0; i < n; ++i)
1913     {
1914       unsigned char ph_i = phase[i];
1915       for (j = i + 1; j < n; ++j)
1916 	if (ph_i != phase[j])
1917 	  add_stack_var_conflict (i, j);
1918     }
1919 
1920   XDELETEVEC (phase);
1921   return ret;
1922 }
1923 
1924 /* Create a decl for the guard at the top of the stack frame.  */
1925 
1926 static void
1927 create_stack_guard (void)
1928 {
1929   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1930 			   VAR_DECL, NULL, ptr_type_node);
1931   TREE_THIS_VOLATILE (guard) = 1;
1932   TREE_USED (guard) = 1;
1933   expand_one_stack_var (guard);
1934   crtl->stack_protect_guard = guard;
1935 }
1936 
1937 /* Prepare for expanding variables.  */
1938 static void
1939 init_vars_expansion (void)
1940 {
1941   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
1942   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1943 
1944   /* A map from decl to stack partition.  */
1945   decl_to_stack_part = new hash_map<tree, size_t>;
1946 
1947   /* Initialize local stack smashing state.  */
1948   has_protected_decls = false;
1949   has_short_buffer = false;
1950 }
1951 
1952 /* Free up stack variable graph data.  */
1953 static void
1954 fini_vars_expansion (void)
1955 {
1956   bitmap_obstack_release (&stack_var_bitmap_obstack);
1957   if (stack_vars)
1958     XDELETEVEC (stack_vars);
1959   if (stack_vars_sorted)
1960     XDELETEVEC (stack_vars_sorted);
1961   stack_vars = NULL;
1962   stack_vars_sorted = NULL;
1963   stack_vars_alloc = stack_vars_num = 0;
1964   delete decl_to_stack_part;
1965   decl_to_stack_part = NULL;
1966 }
1967 
1968 /* Make a fair guess for the size of the stack frame of the function
1969    in NODE.  This doesn't have to be exact, the result is only used in
1970    the inline heuristics.  So we don't want to run the full stack var
1971    packing algorithm (which is quadratic in the number of stack vars).
1972    Instead, we calculate the total size of all stack vars.  This turns
1973    out to be a pretty fair estimate -- packing of stack vars doesn't
1974    happen very often.  */
1975 
1976 HOST_WIDE_INT
1977 estimated_stack_frame_size (struct cgraph_node *node)
1978 {
1979   poly_int64 size = 0;
1980   size_t i;
1981   tree var;
1982   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1983 
1984   push_cfun (fn);
1985 
1986   init_vars_expansion ();
1987 
1988   FOR_EACH_LOCAL_DECL (fn, i, var)
1989     if (auto_var_in_fn_p (var, fn->decl))
1990       size += expand_one_var (var, true, false);
1991 
1992   if (stack_vars_num > 0)
1993     {
1994       /* Fake sorting the stack vars for account_stack_vars ().  */
1995       stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1996       for (i = 0; i < stack_vars_num; ++i)
1997 	stack_vars_sorted[i] = i;
1998       size += account_stack_vars ();
1999     }
2000 
2001   fini_vars_expansion ();
2002   pop_cfun ();
2003   return estimated_poly_value (size);
2004 }
2005 
2006 /* Check if the current function has calls that use a return slot.  */
2007 
2008 static bool
2009 stack_protect_return_slot_p ()
2010 {
2011   basic_block bb;
2012 
2013   FOR_ALL_BB_FN (bb, cfun)
2014     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2015 	 !gsi_end_p (gsi); gsi_next (&gsi))
2016       {
2017 	gimple *stmt = gsi_stmt (gsi);
2018 	/* This assumes that calls to internal-only functions never
2019 	   use a return slot.  */
2020 	if (is_gimple_call (stmt)
2021 	    && !gimple_call_internal_p (stmt)
2022 	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2023 				  gimple_call_fndecl (stmt)))
2024 	  return true;
2025       }
2026   return false;
2027 }
2028 
2029 /* Expand all variables used in the function.  */
2030 
2031 static rtx_insn *
2032 expand_used_vars (void)
2033 {
2034   tree var, outer_block = DECL_INITIAL (current_function_decl);
2035   auto_vec<tree> maybe_local_decls;
2036   rtx_insn *var_end_seq = NULL;
2037   unsigned i;
2038   unsigned len;
2039   bool gen_stack_protect_signal = false;
2040 
2041   /* Compute the phase of the stack frame for this function.  */
2042   {
2043     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2044     int off = targetm.starting_frame_offset () % align;
2045     frame_phase = off ? align - off : 0;
2046   }
2047 
2048   /* Set TREE_USED on all variables in the local_decls.  */
2049   FOR_EACH_LOCAL_DECL (cfun, i, var)
2050     TREE_USED (var) = 1;
2051   /* Clear TREE_USED on all variables associated with a block scope.  */
2052   clear_tree_used (DECL_INITIAL (current_function_decl));
2053 
2054   init_vars_expansion ();
2055 
2056   if (targetm.use_pseudo_pic_reg ())
2057     pic_offset_table_rtx = gen_reg_rtx (Pmode);
2058 
2059   for (i = 0; i < SA.map->num_partitions; i++)
2060     {
2061       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2062 	continue;
2063 
2064       tree var = partition_to_var (SA.map, i);
2065 
2066       gcc_assert (!virtual_operand_p (var));
2067 
2068       expand_one_ssa_partition (var);
2069     }
2070 
2071   if (flag_stack_protect == SPCT_FLAG_STRONG)
2072     gen_stack_protect_signal = stack_protect_return_slot_p ();
2073 
2074   /* At this point all variables on the local_decls with TREE_USED
2075      set are not associated with any block scope.  Lay them out.  */
2076 
2077   len = vec_safe_length (cfun->local_decls);
2078   FOR_EACH_LOCAL_DECL (cfun, i, var)
2079     {
2080       bool expand_now = false;
2081 
2082       /* Expanded above already.  */
2083       if (is_gimple_reg (var))
2084 	{
2085 	  TREE_USED (var) = 0;
2086 	  goto next;
2087 	}
2088       /* We didn't set a block for static or extern because it's hard
2089 	 to tell the difference between a global variable (re)declared
2090 	 in a local scope, and one that's really declared there to
2091 	 begin with.  And it doesn't really matter much, since we're
2092 	 not giving them stack space.  Expand them now.  */
2093       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2094 	expand_now = true;
2095 
2096       /* Expand variables not associated with any block now.  Those created by
2097 	 the optimizers could be live anywhere in the function.  Those that
2098 	 could possibly have been scoped originally and detached from their
2099 	 block will have their allocation deferred so we coalesce them with
2100 	 others when optimization is enabled.  */
2101       else if (TREE_USED (var))
2102 	expand_now = true;
2103 
2104       /* Finally, mark all variables on the list as used.  We'll use
2105 	 this in a moment when we expand those associated with scopes.  */
2106       TREE_USED (var) = 1;
2107 
2108       if (expand_now)
2109 	expand_one_var (var, true, true);
2110 
2111     next:
2112       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2113 	{
2114 	  rtx rtl = DECL_RTL_IF_SET (var);
2115 
2116 	  /* Keep artificial non-ignored vars in cfun->local_decls
2117 	     chain until instantiate_decls.  */
2118 	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2119 	    add_local_decl (cfun, var);
2120 	  else if (rtl == NULL_RTX)
2121 	    /* If rtl isn't set yet, which can happen e.g. with
2122 	       -fstack-protector, retry before returning from this
2123 	       function.  */
2124 	    maybe_local_decls.safe_push (var);
2125 	}
2126     }
2127 
2128   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2129 
2130      +-----------------+-----------------+
2131      | ...processed... | ...duplicates...|
2132      +-----------------+-----------------+
2133                        ^
2134 		       +-- LEN points here.
2135 
2136      We just want the duplicates, as those are the artificial
2137      non-ignored vars that we want to keep until instantiate_decls.
2138      Move them down and truncate the array.  */
2139   if (!vec_safe_is_empty (cfun->local_decls))
2140     cfun->local_decls->block_remove (0, len);
2141 
2142   /* At this point, all variables within the block tree with TREE_USED
2143      set are actually used by the optimized function.  Lay them out.  */
2144   expand_used_vars_for_block (outer_block, true);
2145 
2146   if (stack_vars_num > 0)
2147     {
2148       bool has_addressable_vars = false;
2149 
2150       add_scope_conflicts ();
2151 
2152       /* If stack protection is enabled, we don't share space between
2153 	 vulnerable data and non-vulnerable data.  */
2154       if (flag_stack_protect != 0
2155 	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2156 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2157 		  && lookup_attribute ("stack_protect",
2158 				       DECL_ATTRIBUTES (current_function_decl)))))
2159 	has_addressable_vars = add_stack_protection_conflicts ();
2160 
2161       if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
2162 	gen_stack_protect_signal = true;
2163 
2164       /* Now that we have collected all stack variables, and have computed a
2165 	 minimal interference graph, attempt to save some stack space.  */
2166       partition_stack_vars ();
2167       if (dump_file)
2168 	dump_stack_var_partition ();
2169     }
2170 
2171   switch (flag_stack_protect)
2172     {
2173     case SPCT_FLAG_ALL:
2174       create_stack_guard ();
2175       break;
2176 
2177     case SPCT_FLAG_STRONG:
2178       if (gen_stack_protect_signal
2179 	  || cfun->calls_alloca
2180 	  || has_protected_decls
2181 	  || lookup_attribute ("stack_protect",
2182 			       DECL_ATTRIBUTES (current_function_decl)))
2183 	create_stack_guard ();
2184       break;
2185 
2186     case SPCT_FLAG_DEFAULT:
2187       if (cfun->calls_alloca
2188 	  || has_protected_decls
2189 	  || lookup_attribute ("stack_protect",
2190 			       DECL_ATTRIBUTES (current_function_decl)))
2191 	create_stack_guard ();
2192       break;
2193 
2194     case SPCT_FLAG_EXPLICIT:
2195       if (lookup_attribute ("stack_protect",
2196 			    DECL_ATTRIBUTES (current_function_decl)))
2197 	create_stack_guard ();
2198       break;
2199 
2200     default:
2201       break;
2202     }
2203 
2204   /* Assign rtl to each variable based on these partitions.  */
2205   if (stack_vars_num > 0)
2206     {
2207       class stack_vars_data data;
2208 
2209       data.asan_base = NULL_RTX;
2210       data.asan_alignb = 0;
2211 
2212       /* Reorder decls to be protected by iterating over the variables
2213 	 array multiple times, and allocating out of each phase in turn.  */
2214       /* ??? We could probably integrate this into the qsort we did
2215 	 earlier, such that we naturally see these variables first,
2216 	 and thus naturally allocate things in the right order.  */
2217       if (has_protected_decls)
2218 	{
2219 	  /* Phase 1 contains only character arrays.  */
2220 	  expand_stack_vars (stack_protect_decl_phase_1, &data);
2221 
2222 	  /* Phase 2 contains other kinds of arrays.  */
2223 	  if (flag_stack_protect == SPCT_FLAG_ALL
2224 	      || flag_stack_protect == SPCT_FLAG_STRONG
2225 	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2226 		  && lookup_attribute ("stack_protect",
2227 				       DECL_ATTRIBUTES (current_function_decl))))
2228 	    expand_stack_vars (stack_protect_decl_phase_2, &data);
2229 	}
2230 
2231       if (asan_sanitize_stack_p ())
2232 	/* Phase 3, any partitions that need asan protection
2233 	   in addition to phase 1 and 2.  */
2234 	expand_stack_vars (asan_decl_phase_3, &data);
2235 
2236       /* ASAN description strings don't yet have a syntax for expressing
2237 	 polynomial offsets.  */
2238       HOST_WIDE_INT prev_offset;
2239       if (!data.asan_vec.is_empty ()
2240 	  && frame_offset.is_constant (&prev_offset))
2241 	{
2242 	  HOST_WIDE_INT offset, sz, redzonesz;
2243 	  redzonesz = ASAN_RED_ZONE_SIZE;
2244 	  sz = data.asan_vec[0] - prev_offset;
2245 	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2246 	      && data.asan_alignb <= 4096
2247 	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2248 	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2249 			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2250 	  /* Allocating a constant amount of space from a constant
2251 	     starting offset must give a constant result.  */
2252 	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2253 		    .to_constant ());
2254 	  data.asan_vec.safe_push (prev_offset);
2255 	  data.asan_vec.safe_push (offset);
2256 	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
2257 	  if (STRICT_ALIGNMENT)
2258 	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2259 				      << ASAN_SHADOW_SHIFT)
2260 				     / BITS_PER_UNIT, 1);
2261 
2262 	  var_end_seq
2263 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
2264 					  data.asan_base,
2265 					  data.asan_alignb,
2266 					  data.asan_vec.address (),
2267 					  data.asan_decl_vec.address (),
2268 					  data.asan_vec.length ());
2269 	}
2270 
2271       expand_stack_vars (NULL, &data);
2272     }
2273 
2274   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2275     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2276 					      virtual_stack_vars_rtx,
2277 					      var_end_seq);
2278 
2279   fini_vars_expansion ();
2280 
2281   /* If there were any artificial non-ignored vars without rtl
2282      found earlier, see if deferred stack allocation hasn't assigned
2283      rtl to them.  */
2284   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2285     {
2286       rtx rtl = DECL_RTL_IF_SET (var);
2287 
2288       /* Keep artificial non-ignored vars in cfun->local_decls
2289 	 chain until instantiate_decls.  */
2290       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2291 	add_local_decl (cfun, var);
2292     }
2293 
2294   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
2295   if (STACK_ALIGNMENT_NEEDED)
2296     {
2297       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2298       if (FRAME_GROWS_DOWNWARD)
2299 	frame_offset = aligned_lower_bound (frame_offset, align);
2300       else
2301 	frame_offset = aligned_upper_bound (frame_offset, align);
2302     }
2303 
2304   return var_end_seq;
2305 }
2306 
2307 
2308 /* If we need to produce a detailed dump, print the tree representation
2309    for STMT to the dump file.  SINCE is the last RTX after which the RTL
2310    generated for STMT should have been appended.  */
2311 
2312 static void
2313 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2314 {
2315   if (dump_file && (dump_flags & TDF_DETAILS))
2316     {
2317       fprintf (dump_file, "\n;; ");
2318       print_gimple_stmt (dump_file, stmt, 0,
2319 			 TDF_SLIM | (dump_flags & TDF_LINENO));
2320       fprintf (dump_file, "\n");
2321 
2322       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2323     }
2324 }
2325 
2326 /* Maps the blocks that do not contain tree labels to rtx labels.  */
2327 
2328 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2329 
2330 /* Returns the label_rtx expression for a label starting basic block BB.  */
2331 
2332 static rtx_code_label *
2333 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2334 {
2335   gimple_stmt_iterator gsi;
2336   tree lab;
2337 
2338   if (bb->flags & BB_RTL)
2339     return block_label (bb);
2340 
2341   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2342   if (elt)
2343     return *elt;
2344 
2345   /* Find the tree label if it is present.  */
2346 
2347   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2348     {
2349       glabel *lab_stmt;
2350 
2351       lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2352       if (!lab_stmt)
2353 	break;
2354 
2355       lab = gimple_label_label (lab_stmt);
2356       if (DECL_NONLOCAL (lab))
2357 	break;
2358 
2359       return jump_target_rtx (lab);
2360     }
2361 
2362   rtx_code_label *l = gen_label_rtx ();
2363   lab_rtx_for_bb->put (bb, l);
2364   return l;
2365 }
2366 
2367 
2368 /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
2369    of a basic block where we just expanded the conditional at the end,
2370    possibly clean up the CFG and instruction sequence.  LAST is the
2371    last instruction before the just emitted jump sequence.  */
2372 
2373 static void
2374 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2375 {
2376   /* Special case: when jumpif decides that the condition is
2377      trivial it emits an unconditional jump (and the necessary
2378      barrier).  But we still have two edges, the fallthru one is
2379      wrong.  purge_dead_edges would clean this up later.  Unfortunately
2380      we have to insert insns (and split edges) before
2381      find_many_sub_basic_blocks and hence before purge_dead_edges.
2382      But splitting edges might create new blocks which depend on the
2383      fact that if there are two edges there's no barrier.  So the
2384      barrier would get lost and verify_flow_info would ICE.  Instead
2385      of auditing all edge splitters to care for the barrier (which
2386      normally isn't there in a cleaned CFG), fix it here.  */
2387   if (BARRIER_P (get_last_insn ()))
2388     {
2389       rtx_insn *insn;
2390       remove_edge (e);
2391       /* Now, we have a single successor block, if we have insns to
2392 	 insert on the remaining edge we potentially will insert
2393 	 it at the end of this block (if the dest block isn't feasible)
2394 	 in order to avoid splitting the edge.  This insertion will take
2395 	 place in front of the last jump.  But we might have emitted
2396 	 multiple jumps (conditional and one unconditional) to the
2397 	 same destination.  Inserting in front of the last one then
2398 	 is a problem.  See PR 40021.  We fix this by deleting all
2399 	 jumps except the last unconditional one.  */
2400       insn = PREV_INSN (get_last_insn ());
2401       /* Make sure we have an unconditional jump.  Otherwise we're
2402 	 confused.  */
2403       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2404       for (insn = PREV_INSN (insn); insn != last;)
2405 	{
2406 	  insn = PREV_INSN (insn);
2407 	  if (JUMP_P (NEXT_INSN (insn)))
2408 	    {
2409 	      if (!any_condjump_p (NEXT_INSN (insn)))
2410 		{
2411 		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2412 		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2413 		}
2414 	      delete_insn (NEXT_INSN (insn));
2415 	    }
2416 	}
2417     }
2418 }
2419 
2420 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
2421    Returns a new basic block if we've terminated the current basic
2422    block and created a new one.  */
2423 
2424 static basic_block
2425 expand_gimple_cond (basic_block bb, gcond *stmt)
2426 {
2427   basic_block new_bb, dest;
2428   edge true_edge;
2429   edge false_edge;
2430   rtx_insn *last2, *last;
2431   enum tree_code code;
2432   tree op0, op1;
2433 
2434   code = gimple_cond_code (stmt);
2435   op0 = gimple_cond_lhs (stmt);
2436   op1 = gimple_cond_rhs (stmt);
2437   /* We're sometimes presented with such code:
2438        D.123_1 = x < y;
2439        if (D.123_1 != 0)
2440          ...
2441      This would expand to two comparisons which then later might
2442      be cleaned up by combine.  But some pattern matchers like if-conversion
2443      work better when there's only one compare, so make up for this
2444      here as special exception if TER would have made the same change.  */
2445   if (SA.values
2446       && TREE_CODE (op0) == SSA_NAME
2447       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2448       && TREE_CODE (op1) == INTEGER_CST
2449       && ((gimple_cond_code (stmt) == NE_EXPR
2450 	   && integer_zerop (op1))
2451 	  || (gimple_cond_code (stmt) == EQ_EXPR
2452 	      && integer_onep (op1)))
2453       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2454     {
2455       gimple *second = SSA_NAME_DEF_STMT (op0);
2456       if (gimple_code (second) == GIMPLE_ASSIGN)
2457 	{
2458 	  enum tree_code code2 = gimple_assign_rhs_code (second);
2459 	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
2460 	    {
2461 	      code = code2;
2462 	      op0 = gimple_assign_rhs1 (second);
2463 	      op1 = gimple_assign_rhs2 (second);
2464 	    }
2465 	  /* If jumps are cheap and the target does not support conditional
2466 	     compare, turn some more codes into jumpy sequences.  */
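	  /* (For example, in that case "if (p & q)" on two 1-bit operands
	     is turned into TRUTH_ANDIF_EXPR here, so it expands as a
	     short-circuit jump sequence rather than a materialized bitwise
	     AND; an illustration, assuming the conditions below hold.)  */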
2467 	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2468 		   && targetm.gen_ccmp_first == NULL)
2469 	    {
2470 	      if ((code2 == BIT_AND_EXPR
2471 		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2472 		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2473 		  || code2 == TRUTH_AND_EXPR)
2474 		{
2475 		  code = TRUTH_ANDIF_EXPR;
2476 		  op0 = gimple_assign_rhs1 (second);
2477 		  op1 = gimple_assign_rhs2 (second);
2478 		}
2479 	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2480 		{
2481 		  code = TRUTH_ORIF_EXPR;
2482 		  op0 = gimple_assign_rhs1 (second);
2483 		  op1 = gimple_assign_rhs2 (second);
2484 		}
2485 	    }
2486 	}
2487     }
2488 
2489   /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2490      into (x - C2) * C3 < C4.  */
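  /* (A typical instance, stated as an assumption about what
     maybe_optimize_mod_cmp produces rather than taken from this file: for
     unsigned 32-bit x, "x % 3 == 0" can be tested as
     "x * 0xaaaaaaabU <= 0x55555555U", i.e. a multiply by the modular
     inverse of 3 followed by an unsigned comparison against UINT_MAX / 3.)  */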
2491   if ((code == EQ_EXPR || code == NE_EXPR)
2492       && TREE_CODE (op0) == SSA_NAME
2493       && TREE_CODE (op1) == INTEGER_CST)
2494     code = maybe_optimize_mod_cmp (code, &op0, &op1);
2495 
2496   last2 = last = get_last_insn ();
2497 
2498   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2499   set_curr_insn_location (gimple_location (stmt));
2500 
2501   /* These flags have no purpose in RTL land.  */
2502   true_edge->flags &= ~EDGE_TRUE_VALUE;
2503   false_edge->flags &= ~EDGE_FALSE_VALUE;
2504 
2505   /* We can either have a pure conditional jump with one fallthru edge or
2506      a two-way jump that needs to be decomposed into two basic blocks.  */
2507   if (false_edge->dest == bb->next_bb)
2508     {
2509       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2510 		true_edge->probability);
2511       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2512       if (true_edge->goto_locus != UNKNOWN_LOCATION)
2513 	set_curr_insn_location (true_edge->goto_locus);
2514       false_edge->flags |= EDGE_FALLTHRU;
2515       maybe_cleanup_end_of_block (false_edge, last);
2516       return NULL;
2517     }
2518   if (true_edge->dest == bb->next_bb)
2519     {
2520       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2521 		   false_edge->probability);
2522       maybe_dump_rtl_for_gimple_stmt (stmt, last);
2523       if (false_edge->goto_locus != UNKNOWN_LOCATION)
2524 	set_curr_insn_location (false_edge->goto_locus);
2525       true_edge->flags |= EDGE_FALLTHRU;
2526       maybe_cleanup_end_of_block (true_edge, last);
2527       return NULL;
2528     }
2529 
2530   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2531 	    true_edge->probability);
2532   last = get_last_insn ();
2533   if (false_edge->goto_locus != UNKNOWN_LOCATION)
2534     set_curr_insn_location (false_edge->goto_locus);
2535   emit_jump (label_rtx_for_bb (false_edge->dest));
2536 
2537   BB_END (bb) = last;
2538   if (BARRIER_P (BB_END (bb)))
2539     BB_END (bb) = PREV_INSN (BB_END (bb));
2540   update_bb_for_insn (bb);
2541 
2542   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2543   dest = false_edge->dest;
2544   redirect_edge_succ (false_edge, new_bb);
2545   false_edge->flags |= EDGE_FALLTHRU;
2546   new_bb->count = false_edge->count ();
2547   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2548   add_bb_to_loop (new_bb, loop);
2549   if (loop->latch == bb
2550       && loop->header == dest)
2551     loop->latch = new_bb;
2552   make_single_succ_edge (new_bb, dest, 0);
2553   if (BARRIER_P (BB_END (new_bb)))
2554     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2555   update_bb_for_insn (new_bb);
2556 
2557   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2558 
2559   if (true_edge->goto_locus != UNKNOWN_LOCATION)
2560     {
2561       set_curr_insn_location (true_edge->goto_locus);
2562       true_edge->goto_locus = curr_insn_location ();
2563     }
2564 
2565   return new_bb;
2566 }
2567 
2568 /* Mark all calls that can have a transaction restart.  */
2569 
2570 static void
2571 mark_transaction_restart_calls (gimple *stmt)
2572 {
2573   struct tm_restart_node dummy;
2574   tm_restart_node **slot;
2575 
2576   if (!cfun->gimple_df->tm_restart)
2577     return;
2578 
2579   dummy.stmt = stmt;
2580   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2581   if (slot)
2582     {
2583       struct tm_restart_node *n = *slot;
2584       tree list = n->label_or_list;
2585       rtx_insn *insn;
2586 
2587       for (insn = next_real_insn (get_last_insn ());
2588 	   !CALL_P (insn);
2589 	   insn = next_real_insn (insn))
2590 	continue;
2591 
2592       if (TREE_CODE (list) == LABEL_DECL)
2593 	add_reg_note (insn, REG_TM, label_rtx (list));
2594       else
2595 	for (; list ; list = TREE_CHAIN (list))
2596 	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2597     }
2598 }
2599 
2600 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2601    statement STMT.  */
2602 
2603 static void
2604 expand_call_stmt (gcall *stmt)
2605 {
2606   tree exp, decl, lhs;
2607   bool builtin_p;
2608   size_t i;
2609 
2610   if (gimple_call_internal_p (stmt))
2611     {
2612       expand_internal_call (stmt);
2613       return;
2614     }
2615 
2616   /* If this is a call to a built-in function and it has no effect other
2617      than setting the lhs, try to implement it using an internal function
2618      instead.  */
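  /* (For example, and as an assumption about typical targets: a call such
     as "y = __builtin_sqrt (x)" whose only effect is the assignment may be
     replaced by the internal function IFN_SQRT and expanded directly
     through the corresponding optab.)  */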
2619   decl = gimple_call_fndecl (stmt);
2620   if (gimple_call_lhs (stmt)
2621       && !gimple_has_side_effects (stmt)
2622       && (optimize || (decl && called_as_built_in (decl))))
2623     {
2624       internal_fn ifn = replacement_internal_fn (stmt);
2625       if (ifn != IFN_LAST)
2626 	{
2627 	  expand_internal_call (ifn, stmt);
2628 	  return;
2629 	}
2630     }
2631 
2632   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2633 
2634   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2635   builtin_p = decl && fndecl_built_in_p (decl);
2636 
2637   /* If this is not a builtin function, the function type through which the
2638      call is made may be different from the type of the function.  */
2639   if (!builtin_p)
2640     CALL_EXPR_FN (exp)
2641       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2642 		      CALL_EXPR_FN (exp));
2643 
2644   TREE_TYPE (exp) = gimple_call_return_type (stmt);
2645   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2646 
2647   for (i = 0; i < gimple_call_num_args (stmt); i++)
2648     {
2649       tree arg = gimple_call_arg (stmt, i);
2650       gimple *def;
2651       /* TER forwards addresses into the arguments of builtin functions so
2652 	 we have a chance to infer more correct alignment information.  See PR39954.  */
2653       if (builtin_p
2654 	  && TREE_CODE (arg) == SSA_NAME
2655 	  && (def = get_gimple_for_ssa_name (arg))
2656 	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2657 	arg = gimple_assign_rhs1 (def);
2658       CALL_EXPR_ARG (exp, i) = arg;
2659     }
2660 
2661   if (gimple_has_side_effects (stmt))
2662     TREE_SIDE_EFFECTS (exp) = 1;
2663 
2664   if (gimple_call_nothrow_p (stmt))
2665     TREE_NOTHROW (exp) = 1;
2666 
2667   if (gimple_no_warning_p (stmt))
2668     TREE_NO_WARNING (exp) = 1;
2669 
2670   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2671   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2672   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2673   if (decl
2674       && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2675       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2676     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2677   else
2678     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2679   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2680   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2681   SET_EXPR_LOCATION (exp, gimple_location (stmt));
2682 
2683   /* Ensure RTL is created for debug args.  */
2684   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2685     {
2686       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2687       unsigned int ix;
2688       tree dtemp;
2689 
2690       if (debug_args)
2691 	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2692 	  {
2693 	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2694 	    expand_debug_expr (dtemp);
2695 	  }
2696     }
2697 
2698   rtx_insn *before_call = get_last_insn ();
2699   lhs = gimple_call_lhs (stmt);
2700   if (lhs)
2701     expand_assignment (lhs, exp, false);
2702   else
2703     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2704 
2705   /* If the gimple call is an indirect call and has the 'nocf_check'
2706      attribute, find the generated CALL insn and mark it to indicate that
2707      no control-flow verification is needed for it.  */
2708   if (gimple_call_nocf_check_p (stmt)
2709       && !gimple_call_fndecl (stmt))
2710     {
2711       rtx_insn *last = get_last_insn ();
2712       while (!CALL_P (last)
2713 	     && last != before_call)
2714 	last = PREV_INSN (last);
2715 
2716       if (last != before_call)
2717 	add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2718     }
2719 
2720   mark_transaction_restart_calls (stmt);
2721 }
2722 
2723 
2724 /* Generate RTL for an asm statement (explicit assembler code).
2725    STRING is a STRING_CST node containing the assembler code text,
2726    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
2727    insn is volatile; don't optimize it.  */
2728 
2729 static void
2730 expand_asm_loc (tree string, int vol, location_t locus)
2731 {
2732   rtx body;
2733 
2734   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2735 				ggc_strdup (TREE_STRING_POINTER (string)),
2736 				locus);
2737 
2738   MEM_VOLATILE_P (body) = vol;
2739 
2740   /* Non-empty basic ASM implicitly clobbers memory.  */
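  /* (For instance, 'asm ("nop")' gets the wildcard MEM clobber built below,
     while an empty 'asm ("")' is emitted as a bare ASM_INPUT; this assumes
     an empty string literal has TREE_STRING_LENGTH zero here.)  */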
2741   if (TREE_STRING_LENGTH (string) != 0)
2742     {
2743       rtx asm_op, clob;
2744       unsigned i, nclobbers;
2745       auto_vec<rtx> input_rvec, output_rvec;
2746       auto_vec<const char *> constraints;
2747       auto_vec<rtx> clobber_rvec;
2748       HARD_REG_SET clobbered_regs;
2749       CLEAR_HARD_REG_SET (clobbered_regs);
2750 
2751       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2752       clobber_rvec.safe_push (clob);
2753 
2754       if (targetm.md_asm_adjust)
2755 	targetm.md_asm_adjust (output_rvec, input_rvec,
2756 			       constraints, clobber_rvec,
2757 			       clobbered_regs);
2758 
2759       asm_op = body;
2760       nclobbers = clobber_rvec.length ();
2761       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2762 
2763       XVECEXP (body, 0, 0) = asm_op;
2764       for (i = 0; i < nclobbers; i++)
2765 	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2766     }
2767 
2768   emit_insn (body);
2769 }
2770 
2771 /* Return the number of times character C occurs in string S.  */
2772 static int
2773 n_occurrences (int c, const char *s)
2774 {
2775   int n = 0;
2776   while (*s)
2777     n += (*s++ == c);
2778   return n;
2779 }
2780 
2781 /* A subroutine of expand_asm_operands.  Check that all operands have
2782    the same number of alternatives.  Return true if so.  */
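/* (For example, constraint strings "=r,m" and "r,r" each contain one ','
   and hence describe two alternatives, so they are consistent; mixing
   "=r" with "r,r" would be rejected.)  */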
2783 
2784 static bool
2785 check_operand_nalternatives (const vec<const char *> &constraints)
2786 {
2787   unsigned len = constraints.length();
2788   if (len > 0)
2789     {
2790       int nalternatives = n_occurrences (',', constraints[0]);
2791 
2792       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2793 	{
2794 	  error ("too many alternatives in %<asm%>");
2795 	  return false;
2796 	}
2797 
2798       for (unsigned i = 1; i < len; ++i)
2799 	if (n_occurrences (',', constraints[i]) != nalternatives)
2800 	  {
2801 	    error ("operand constraints for %<asm%> differ "
2802 		   "in number of alternatives");
2803 	    return false;
2804 	  }
2805     }
2806   return true;
2807 }
2808 
2809 /* Check for overlap between registers marked in CLOBBERED_REGS and
2810    anything inappropriate in T.  Emit an error and return true if a
2811    conflict is found, false otherwise.  */
2812 
2813 static bool
2814 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2815 {
2816   /* Conflicts between asm-declared register variables and the clobber
2817      list are not allowed.  */
2818   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2819 
2820   if (overlap)
2821     {
2822       error ("%<asm%> specifier for variable %qE conflicts with "
2823 	     "%<asm%> clobber list",
2824 	     DECL_NAME (overlap));
2825 
2826       /* Reset registerness to stop multiple errors emitted for a single
2827 	 variable.  */
2828       DECL_REGISTER (overlap) = 0;
2829       return true;
2830     }
2831 
2832   return false;
2833 }
2834 
2835 /* Check that the given REGNO spanning NREGS is a valid
2836    asm clobber operand.  Some HW registers cannot be
2837    saved/restored, hence they should not be clobbered by
2838    asm statements.  */
2839 static bool
2840 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2841 {
2842   bool is_valid = true;
2843   HARD_REG_SET regset;
2844 
2845   CLEAR_HARD_REG_SET (regset);
2846 
2847   add_range_to_hard_reg_set (&regset, regno, nregs);
2848 
2849   /* Clobbering the PIC register is an error.  */
2850   if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2851       && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2852     {
2853       /* ??? Diagnose during gimplification?  */
2854       error ("PIC register clobbered by %qs in %<asm%>", regname);
2855       is_valid = false;
2856     }
2857   else if (!in_hard_reg_set_p
2858 	   (accessible_reg_set, reg_raw_mode[regno], regno))
2859     {
2860       /* ??? Diagnose during gimplification?  */
2861       error ("the register %qs cannot be clobbered in %<asm%>"
2862 	     " for the current target", regname);
2863       is_valid = false;
2864     }
2865 
2866   /* Clobbering the stack pointer register is deprecated.  GCC expects
2867      the value of the stack pointer after an asm statement to be the same
2868      as it was before, so no asm can validly clobber the stack pointer in
2869      the usual sense.  Adding the stack pointer to the clobber list has
2870      traditionally had some undocumented and somewhat obscure side-effects.  */
2871   if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM))
2872     {
2873       crtl->sp_is_clobbered_by_asm = true;
2874       if (warning (OPT_Wdeprecated, "listing the stack pointer register"
2875 		   " %qs in a clobber list is deprecated", regname))
2876 	inform (input_location, "the value of the stack pointer after"
2877 		" an %<asm%> statement must be the same as it was before"
2878 		" the statement");
2879     }
2880 
2881   return is_valid;
2882 }
2883 
2884 /* Generate RTL for an asm statement with arguments.
2885    STRING is the instruction template.
2886    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2887    Each output or input has an expression in the TREE_VALUE and
2888    a tree list in TREE_PURPOSE which in turn contains a constraint
2889    name in TREE_VALUE (or NULL_TREE) and a constraint string
2890    in TREE_PURPOSE.
2891    CLOBBERS is a list of STRING_CST nodes each naming a hard register
2892    that is clobbered by this insn.
2893 
2894    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2895    should be the fallthru basic block of the asm goto.
2896 
2897    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2898    Some elements of OUTPUTS may be replaced with trees representing temporary
2899    values.  The caller should copy those temporary values to the originally
2900    specified lvalues.
2901 
2902    VOL nonzero means the insn is volatile; don't optimize it.  */
2903 
2904 static void
2905 expand_asm_stmt (gasm *stmt)
2906 {
2907   class save_input_location
2908   {
2909     location_t old;
2910 
2911   public:
2912     explicit save_input_location(location_t where)
2913     {
2914       old = input_location;
2915       input_location = where;
2916     }
2917 
2918     ~save_input_location()
2919     {
2920       input_location = old;
2921     }
2922   };
2923 
2924   location_t locus = gimple_location (stmt);
2925 
2926   if (gimple_asm_input_p (stmt))
2927     {
2928       const char *s = gimple_asm_string (stmt);
2929       tree string = build_string (strlen (s), s);
2930       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2931       return;
2932     }
2933 
2934   /* There are some legacy diagnostics in here, and this also avoids a
2935      sixth parameter to targetm.md_asm_adjust.  */
2936   save_input_location s_i_l(locus);
2937 
2938   unsigned noutputs = gimple_asm_noutputs (stmt);
2939   unsigned ninputs = gimple_asm_ninputs (stmt);
2940   unsigned nlabels = gimple_asm_nlabels (stmt);
2941   unsigned i;
2942 
2943   /* ??? Diagnose during gimplification?  */
2944   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2945     {
2946       error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2947       return;
2948     }
2949 
2950   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2951   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2952   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2953 
2954   /* Copy the gimple vectors into new vectors that we can manipulate.  */
2955 
2956   output_tvec.safe_grow (noutputs);
2957   input_tvec.safe_grow (ninputs);
2958   constraints.safe_grow (noutputs + ninputs);
2959 
2960   for (i = 0; i < noutputs; ++i)
2961     {
2962       tree t = gimple_asm_output_op (stmt, i);
2963       output_tvec[i] = TREE_VALUE (t);
2964       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2965     }
2966   for (i = 0; i < ninputs; i++)
2967     {
2968       tree t = gimple_asm_input_op (stmt, i);
2969       input_tvec[i] = TREE_VALUE (t);
2970       constraints[i + noutputs]
2971 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2972     }
2973 
2974   /* ??? Diagnose during gimplification?  */
2975   if (! check_operand_nalternatives (constraints))
2976     return;
2977 
2978   /* Count the number of meaningful clobbered registers, ignoring what
2979      we would ignore later.  */
2980   auto_vec<rtx> clobber_rvec;
2981   HARD_REG_SET clobbered_regs;
2982   CLEAR_HARD_REG_SET (clobbered_regs);
2983 
2984   if (unsigned n = gimple_asm_nclobbers (stmt))
2985     {
2986       clobber_rvec.reserve (n);
2987       for (i = 0; i < n; i++)
2988 	{
2989 	  tree t = gimple_asm_clobber_op (stmt, i);
2990           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2991 	  int nregs, j;
2992 
2993 	  j = decode_reg_name_and_count (regname, &nregs);
2994 	  if (j < 0)
2995 	    {
2996 	      if (j == -2)
2997 		{
2998 		  /* ??? Diagnose during gimplification?  */
2999 		  error ("unknown register name %qs in %<asm%>", regname);
3000 		}
3001 	      else if (j == -4)
3002 		{
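		  /* Presumably the "memory" clobber: represent it as a
		     wildcard MEM, mirroring the implicit memory clobber
		     added in expand_asm_loc above.  */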
3003 		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3004 		  clobber_rvec.safe_push (x);
3005 		}
3006 	      else
3007 		{
3008 		  /* Otherwise we should have -1 == empty string
3009 		     or -3 == cc, which is not a register.  */
3010 		  gcc_assert (j == -1 || j == -3);
3011 		}
3012 	    }
3013 	  else
3014 	    for (int reg = j; reg < j + nregs; reg++)
3015 	      {
3016 		if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3017 		  return;
3018 
3019 	        SET_HARD_REG_BIT (clobbered_regs, reg);
3020 	        rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3021 		clobber_rvec.safe_push (x);
3022 	      }
3023 	}
3024     }
3025 
3026   /* First pass over inputs and outputs checks validity and calls
3027      mark_addressable where needed.  */
3028   /* ??? Diagnose during gimplification?  */
3029 
3030   for (i = 0; i < noutputs; ++i)
3031     {
3032       tree val = output_tvec[i];
3033       tree type = TREE_TYPE (val);
3034       const char *constraint;
3035       bool is_inout;
3036       bool allows_reg;
3037       bool allows_mem;
3038 
3039       /* Try to parse the output constraint.  If that fails, there's
3040 	 no point in going further.  */
3041       constraint = constraints[i];
3042       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3043 				    &allows_mem, &allows_reg, &is_inout))
3044 	return;
3045 
3046       /* If the output is a hard register, verify it doesn't conflict with
3047 	 any other operand's possible hard register use.  */
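      /* (Illustration only: two outputs both bound to register variables
	 declared with the same 'register ... asm ("...")' specifier would
	 be diagnosed by the checks below.)  */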
3048       if (DECL_P (val)
3049 	  && REG_P (DECL_RTL (val))
3050 	  && HARD_REGISTER_P (DECL_RTL (val)))
3051 	{
3052 	  unsigned j, output_hregno = REGNO (DECL_RTL (val));
3053 	  bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3054 	  unsigned long match;
3055 
3056 	  /* Verify the other outputs do not use the same hard register.  */
3057 	  for (j = i + 1; j < noutputs; ++j)
3058 	    if (DECL_P (output_tvec[j])
3059 		&& REG_P (DECL_RTL (output_tvec[j]))
3060 		&& HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3061 		&& output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3062 	      error ("invalid hard register usage between output operands");
3063 
3064 	  /* Verify matching constraint operands use the same hard register
3065 	     and that the non-matching constraint operands do not use the same
3066 	     hard register if the output is an early clobber operand.  */
3067 	  for (j = 0; j < ninputs; ++j)
3068 	    if (DECL_P (input_tvec[j])
3069 		&& REG_P (DECL_RTL (input_tvec[j]))
3070 		&& HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3071 	      {
3072 		unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3073 		switch (*constraints[j + noutputs])
3074 		  {
3075 		  case '0':  case '1':  case '2':  case '3':  case '4':
3076 		  case '5':  case '6':  case '7':  case '8':  case '9':
3077 		    match = strtoul (constraints[j + noutputs], NULL, 10);
3078 		    break;
3079 		  default:
3080 		    match = ULONG_MAX;
3081 		    break;
3082 		  }
3083 		if (i == match
3084 		    && output_hregno != input_hregno)
3085 		  error ("invalid hard register usage between output operand "
3086 			 "and matching constraint operand");
3087 		else if (early_clobber_p
3088 			 && i != match
3089 			 && output_hregno == input_hregno)
3090 		  error ("invalid hard register usage between earlyclobber "
3091 			 "operand and input operand");
3092 	      }
3093 	}
3094 
3095       if (! allows_reg
3096 	  && (allows_mem
3097 	      || is_inout
3098 	      || (DECL_P (val)
3099 		  && REG_P (DECL_RTL (val))
3100 		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3101 	mark_addressable (val);
3102     }
3103 
3104   for (i = 0; i < ninputs; ++i)
3105     {
3106       bool allows_reg, allows_mem;
3107       const char *constraint;
3108 
3109       constraint = constraints[i + noutputs];
3110       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3111 				    constraints.address (),
3112 				    &allows_mem, &allows_reg))
3113 	return;
3114 
3115       if (! allows_reg && allows_mem)
3116 	mark_addressable (input_tvec[i]);
3117     }
3118 
3119   /* Second pass evaluates arguments.  */
3120 
3121   /* Make sure stack is consistent for asm goto.  */
3122   if (nlabels > 0)
3123     do_pending_stack_adjust ();
3124   int old_generating_concat_p = generating_concat_p;
3125 
3126   /* Vector of RTX's of evaluated output operands.  */
3127   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3128   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3129   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3130 
3131   output_rvec.safe_grow (noutputs);
3132 
3133   for (i = 0; i < noutputs; ++i)
3134     {
3135       tree val = output_tvec[i];
3136       tree type = TREE_TYPE (val);
3137       bool is_inout, allows_reg, allows_mem, ok;
3138       rtx op;
3139 
3140       ok = parse_output_constraint (&constraints[i], i, ninputs,
3141 				    noutputs, &allows_mem, &allows_reg,
3142 				    &is_inout);
3143       gcc_assert (ok);
3144 
3145       /* If an output operand is not a decl or indirect ref and our constraint
3146 	 allows a register, make a temporary to act as an intermediate.
3147 	 Make the asm insn write into that, then we will copy it to
3148 	 the real output operand.  Likewise for promoted variables.  */
3149 
3150       generating_concat_p = 0;
3151 
3152       if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3153 	  || (DECL_P (val)
3154 	      && (allows_mem || REG_P (DECL_RTL (val)))
3155 	      && ! (REG_P (DECL_RTL (val))
3156 		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3157 	  || ! allows_reg
3158 	  || is_inout
3159 	  || TREE_ADDRESSABLE (type))
3160 	{
3161 	  op = expand_expr (val, NULL_RTX, VOIDmode,
3162 			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3163 	  if (MEM_P (op))
3164 	    op = validize_mem (op);
3165 
3166 	  if (! allows_reg && !MEM_P (op))
3167 	    error ("output number %d not directly addressable", i);
3168 	  if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3169 	      || GET_CODE (op) == CONCAT)
3170 	    {
3171 	      rtx old_op = op;
3172 	      op = gen_reg_rtx (GET_MODE (op));
3173 
3174 	      generating_concat_p = old_generating_concat_p;
3175 
3176 	      if (is_inout)
3177 		emit_move_insn (op, old_op);
3178 
3179 	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
3180 	      emit_move_insn (old_op, op);
3181 	      after_rtl_seq = get_insns ();
3182 	      after_rtl_end = get_last_insn ();
3183 	      end_sequence ();
3184 	    }
3185 	}
3186       else
3187 	{
3188 	  op = assign_temp (type, 0, 1);
3189 	  op = validize_mem (op);
3190 	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3191 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3192 
3193 	  generating_concat_p = old_generating_concat_p;
3194 
3195 	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
3196 	  expand_assignment (val, make_tree (type, op), false);
3197 	  after_rtl_seq = get_insns ();
3198 	  after_rtl_end = get_last_insn ();
3199 	  end_sequence ();
3200 	}
3201       output_rvec[i] = op;
3202 
3203       if (is_inout)
3204 	inout_opnum.safe_push (i);
3205     }
3206 
3207   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3208   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3209 
3210   input_rvec.safe_grow (ninputs);
3211   input_mode.safe_grow (ninputs);
3212 
3213   generating_concat_p = 0;
3214 
3215   for (i = 0; i < ninputs; ++i)
3216     {
3217       tree val = input_tvec[i];
3218       tree type = TREE_TYPE (val);
3219       bool allows_reg, allows_mem, ok;
3220       const char *constraint;
3221       rtx op;
3222 
3223       constraint = constraints[i + noutputs];
3224       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3225 				   constraints.address (),
3226 				   &allows_mem, &allows_reg);
3227       gcc_assert (ok);
3228 
3229       /* EXPAND_INITIALIZER will not generate code for valid initializer
3230 	 constants, but will still generate code for other types of operand.
3231 	 This is the behavior we want for constant constraints.  */
3232       op = expand_expr (val, NULL_RTX, VOIDmode,
3233 			allows_reg ? EXPAND_NORMAL
3234 			: allows_mem ? EXPAND_MEMORY
3235 			: EXPAND_INITIALIZER);
3236 
3237       /* Never pass a CONCAT to an ASM.  */
3238       if (GET_CODE (op) == CONCAT)
3239 	op = force_reg (GET_MODE (op), op);
3240       else if (MEM_P (op))
3241 	op = validize_mem (op);
3242 
3243       if (asm_operand_ok (op, constraint, NULL) <= 0)
3244 	{
3245 	  if (allows_reg && TYPE_MODE (type) != BLKmode)
3246 	    op = force_reg (TYPE_MODE (type), op);
3247 	  else if (!allows_mem)
3248 	    warning (0, "%<asm%> operand %d probably does not match "
3249 		     "constraints",
3250 		     i + noutputs);
3251 	  else if (MEM_P (op))
3252 	    {
3253 	      /* We won't recognize either volatile memory or memory
3255 		 with a queued address as a memory_operand at this
3255 		 point.  Ignore it: clearly this *is* a memory.  */
3256 	    }
3257 	  else
3258 	    gcc_unreachable ();
3259 	}
3260       input_rvec[i] = op;
3261       input_mode[i] = TYPE_MODE (type);
3262     }
3263 
3264   /* For in-out operands, copy output rtx to input rtx.  */
3265   unsigned ninout = inout_opnum.length();
3266   for (i = 0; i < ninout; i++)
3267     {
3268       int j = inout_opnum[i];
3269       rtx o = output_rvec[j];
3270 
3271       input_rvec.safe_push (o);
3272       input_mode.safe_push (GET_MODE (o));
3273 
3274       char buffer[16];
3275       sprintf (buffer, "%d", j);
3276       constraints.safe_push (ggc_strdup (buffer));
3277     }
3278   ninputs += ninout;
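  /* E.g. an operand written as "+r" (x) at the source level becomes an
     output with constraint "=r" plus a matching input whose constraint
     is the output's operand number ("0"), which ties both to the same
     register.  */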
3279 
3280   /* Sometimes we wish to automatically clobber registers across an asm.
3281      Case in point is when the i386 backend moved from cc0 to a hard reg --
3282      maintaining source-level compatibility means automatically clobbering
3283      the flags register.  */
3284   rtx_insn *after_md_seq = NULL;
3285   if (targetm.md_asm_adjust)
3286     after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3287 					  constraints, clobber_rvec,
3288 					  clobbered_regs);
3289 
3290   /* Do not allow the hook to change the output and input count,
3291      lest it mess up the operand numbering.  */
3292   gcc_assert (output_rvec.length() == noutputs);
3293   gcc_assert (input_rvec.length() == ninputs);
3294   gcc_assert (constraints.length() == noutputs + ninputs);
3295 
3296   /* But it certainly can adjust the clobbers.  */
3297   unsigned nclobbers = clobber_rvec.length ();
3298 
3299   /* Third pass checks for easy conflicts.  */
3300   /* ??? Why are we doing this on trees instead of rtx.  */
3301 
3302   bool clobber_conflict_found = 0;
3303   for (i = 0; i < noutputs; ++i)
3304     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3305 	clobber_conflict_found = 1;
3306   for (i = 0; i < ninputs - ninout; ++i)
3307     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3308 	clobber_conflict_found = 1;
3309 
3310   /* Make vectors for the expression-rtx, constraint strings,
3311      and named operands.  */
3312 
3313   rtvec argvec = rtvec_alloc (ninputs);
3314   rtvec constraintvec = rtvec_alloc (ninputs);
3315   rtvec labelvec = rtvec_alloc (nlabels);
3316 
3317   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3318 				    : GET_MODE (output_rvec[0])),
3319 				   ggc_strdup (gimple_asm_string (stmt)),
3320 				   "", 0, argvec, constraintvec,
3321 				   labelvec, locus);
3322   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3323 
3324   for (i = 0; i < ninputs; ++i)
3325     {
3326       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3327       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3328 	= gen_rtx_ASM_INPUT_loc (input_mode[i],
3329 				 constraints[i + noutputs],
3330 				 locus);
3331     }
3332 
3333   /* Copy labels to the vector.  */
3334   rtx_code_label *fallthru_label = NULL;
3335   if (nlabels > 0)
3336     {
3337       basic_block fallthru_bb = NULL;
3338       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3339       if (fallthru)
3340 	fallthru_bb = fallthru->dest;
3341 
3342       for (i = 0; i < nlabels; ++i)
3343 	{
3344 	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3345 	  rtx_insn *r;
3346 	  /* If asm goto has any labels in the fallthru basic block, use
3347 	     a label that we emit immediately after the asm goto.  Expansion
3348 	     may insert further instructions into the same basic block after
3349 	     asm goto and if we don't do this, insertion of instructions on
3350 	     the fallthru edge might misbehave.  See PR58670.  */
3351 	  if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3352 	    {
3353 	      if (fallthru_label == NULL_RTX)
3354 	        fallthru_label = gen_label_rtx ();
3355 	      r = fallthru_label;
3356 	    }
3357 	  else
3358 	    r = label_rtx (label);
3359 	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3360 	}
3361     }
3362 
3363   /* Now, for each output, construct an rtx
3364      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3365 			       ARGVEC CONSTRAINTS OPNAMES))
3366      If there is more than one, put them inside a PARALLEL.  */
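  /* Illustrative shape for two outputs and one clobber:
       (parallel [(set (reg 90) (asm_operands ... 0 ...))
		  (set (reg 91) (asm_operands ... 1 ...))
		  (clobber (reg flags))])  */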
3367 
3368   if (nlabels > 0 && nclobbers == 0)
3369     {
3370       gcc_assert (noutputs == 0);
3371       emit_jump_insn (body);
3372     }
3373   else if (noutputs == 0 && nclobbers == 0)
3374     {
3375       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
3376       emit_insn (body);
3377     }
3378   else if (noutputs == 1 && nclobbers == 0)
3379     {
3380       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3381       emit_insn (gen_rtx_SET (output_rvec[0], body));
3382     }
3383   else
3384     {
3385       rtx obody = body;
3386       int num = noutputs;
3387 
3388       if (num == 0)
3389 	num = 1;
3390 
3391       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3392 
3393       /* For each output operand, store a SET.  */
3394       for (i = 0; i < noutputs; ++i)
3395 	{
3396 	  rtx src, o = output_rvec[i];
3397 	  if (i == 0)
3398 	    {
3399 	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3400 	      src = obody;
3401 	    }
3402 	  else
3403 	    {
3404 	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3405 					  ASM_OPERANDS_TEMPLATE (obody),
3406 					  constraints[i], i, argvec,
3407 					  constraintvec, labelvec, locus);
3408 	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3409 	    }
3410 	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3411 	}
3412 
3413       /* If there are no outputs (but there are some clobbers)
3414 	 store the bare ASM_OPERANDS into the PARALLEL.  */
3415       if (i == 0)
3416 	XVECEXP (body, 0, i++) = obody;
3417 
3418       /* Store (clobber REG) for each clobbered register specified.  */
3419       for (unsigned j = 0; j < nclobbers; ++j)
3420 	{
3421 	  rtx clobbered_reg = clobber_rvec[j];
3422 
3423 	  /* Sanity check for any overlap between the clobbers and the
3424 	     inputs or outputs that was not handled above.  Such overlap
3425 	     should have been detected and reported above.  */
3426 	  if (!clobber_conflict_found && REG_P (clobbered_reg))
3427 	    {
3428 	      /* We test the old body (obody) contents to avoid
3429 		 tripping over the under-construction body.  */
3430 	      for (unsigned k = 0; k < noutputs; ++k)
3431 		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3432 		  internal_error ("%<asm%> clobber conflict with "
3433 				  "output operand");
3434 
3435 	      for (unsigned k = 0; k < ninputs - ninout; ++k)
3436 		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3437 		  internal_error ("%<asm%> clobber conflict with "
3438 				  "input operand");
3439 	    }
3440 
3441 	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3442 	}
3443 
3444       if (nlabels > 0)
3445 	emit_jump_insn (body);
3446       else
3447 	emit_insn (body);
3448     }
3449 
3450   generating_concat_p = old_generating_concat_p;
3451 
3452   if (fallthru_label)
3453     emit_label (fallthru_label);
3454 
3455   if (after_md_seq)
3456     emit_insn (after_md_seq);
3457   if (after_rtl_seq)
3458     emit_insn (after_rtl_seq);
3459 
3460   free_temp_slots ();
3461   crtl->has_asm_statement = 1;
3462 }
3463 
3464 /* Emit code to jump to the address
3465    specified by the pointer expression EXP.  */
3466 
3467 static void
3468 expand_computed_goto (tree exp)
3469 {
3470   rtx x = expand_normal (exp);
3471 
3472   do_pending_stack_adjust ();
3473   emit_indirect_jump (x);
3474 }
3475 
3476 /* Generate RTL code for a `goto' statement with target label LABEL.
3477    LABEL should be a LABEL_DECL tree node that was or will later be
3478    defined with `expand_label'.  */
3479 
3480 static void
3481 expand_goto (tree label)
3482 {
3483   if (flag_checking)
3484     {
3485       /* Check for a nonlocal goto to a containing function.  Should have
3486 	 gotten translated to __builtin_nonlocal_goto.  */
3487       tree context = decl_function_context (label);
3488       gcc_assert (!context || context == current_function_decl);
3489     }
3490 
3491   emit_jump (jump_target_rtx (label));
3492 }
3493 
3494 /* Output a return with no value.  */
3495 
3496 static void
3497 expand_null_return_1 (void)
3498 {
3499   clear_pending_stack_adjust ();
3500   do_pending_stack_adjust ();
3501   emit_jump (return_label);
3502 }
3503 
3504 /* Generate RTL to return from the current function, with no value.
3505    (That is, we do not do anything about returning any value.)  */
3506 
3507 void
3508 expand_null_return (void)
3509 {
3510   /* If this function was declared to return a value, but we
3511      didn't, clobber the return registers so that they are not
3512      propagated live to the rest of the function.  */
3513   clobber_return_register ();
3514 
3515   expand_null_return_1 ();
3516 }
3517 
3518 /* Generate RTL to return from the current function, with value VAL.  */
3519 
3520 static void
3521 expand_value_return (rtx val)
3522 {
3523   /* Copy the value to the return location unless it's already there.  */
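  /* For instance, a function returning 'short' on a target that
     promotes return values to word mode gets the wider mode from
     promote_function_mode below, and the value is converted with
     convert_modes before the move into the return register.  */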
3524 
3525   tree decl = DECL_RESULT (current_function_decl);
3526   rtx return_reg = DECL_RTL (decl);
3527   if (return_reg != val)
3528     {
3529       tree funtype = TREE_TYPE (current_function_decl);
3530       tree type = TREE_TYPE (decl);
3531       int unsignedp = TYPE_UNSIGNED (type);
3532       machine_mode old_mode = DECL_MODE (decl);
3533       machine_mode mode;
3534       if (DECL_BY_REFERENCE (decl))
3535         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3536       else
3537         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3538 
3539       if (mode != old_mode)
3540 	val = convert_modes (mode, old_mode, val, unsignedp);
3541 
3542       if (GET_CODE (return_reg) == PARALLEL)
3543 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3544       else
3545 	emit_move_insn (return_reg, val);
3546     }
3547 
3548   expand_null_return_1 ();
3549 }
3550 
3551 /* Generate RTL to evaluate the expression RETVAL and return it
3552    from the current function.  */
3553 
3554 static void
3555 expand_return (tree retval)
3556 {
3557   rtx result_rtl;
3558   rtx val = 0;
3559   tree retval_rhs;
3560 
3561   /* If function wants no value, give it none.  */
3562   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3563     {
3564       expand_normal (retval);
3565       expand_null_return ();
3566       return;
3567     }
3568 
3569   if (retval == error_mark_node)
3570     {
3571       /* Treat this like a return of no value from a function that
3572 	 returns a value.  */
3573       expand_null_return ();
3574       return;
3575     }
3576   else if ((TREE_CODE (retval) == MODIFY_EXPR
3577 	    || TREE_CODE (retval) == INIT_EXPR)
3578 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3579     retval_rhs = TREE_OPERAND (retval, 1);
3580   else
3581     retval_rhs = retval;
3582 
3583   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3584 
3585   /* If we are returning the RESULT_DECL, then the value has already
3586      been stored into it, so we don't have to do anything special.  */
3587   if (TREE_CODE (retval_rhs) == RESULT_DECL)
3588     expand_value_return (result_rtl);
3589 
3590   /* If the result is an aggregate that is being returned in one (or more)
3591      registers, load the registers here.  */
3592 
3593   else if (retval_rhs != 0
3594 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3595 	   && REG_P (result_rtl))
3596     {
3597       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3598       if (val)
3599 	{
3600 	  /* Use the mode of the result value on the return register.  */
3601 	  PUT_MODE (result_rtl, GET_MODE (val));
3602 	  expand_value_return (val);
3603 	}
3604       else
3605 	expand_null_return ();
3606     }
3607   else if (retval_rhs != 0
3608 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3609 	   && (REG_P (result_rtl)
3610 	       || (GET_CODE (result_rtl) == PARALLEL)))
3611     {
3612       /* Compute the return value into a temporary (usually a pseudo reg).  */
3613       val
3614 	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3615       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3616       val = force_not_mem (val);
3617       expand_value_return (val);
3618     }
3619   else
3620     {
3621       /* No hard reg used; calculate value into hard return reg.  */
3622       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3623       expand_value_return (result_rtl);
3624     }
3625 }
3626 
3627 /* Expand a clobber of LHS.  If LHS is stored in a multi-part
3628    register, tell the rtl optimizers that its value is no longer
3629    needed.  */
3630 
3631 static void
3632 expand_clobber (tree lhs)
3633 {
3634   if (DECL_P (lhs))
3635     {
3636       rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3637       if (decl_rtl && REG_P (decl_rtl))
3638 	{
3639 	  machine_mode decl_mode = GET_MODE (decl_rtl);
3640 	  if (maybe_gt (GET_MODE_SIZE (decl_mode),
3641 			REGMODE_NATURAL_SIZE (decl_mode)))
3642 	    emit_clobber (decl_rtl);
3643 	}
3644     }
3645 }
3646 
3647 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3648    STMT that doesn't require special handling for outgoing edges.  That
3649    is, no tail calls and no GIMPLE_COND.  */
3650 
3651 static void
3652 expand_gimple_stmt_1 (gimple *stmt)
3653 {
3654   tree op0;
3655 
3656   set_curr_insn_location (gimple_location (stmt));
3657 
3658   switch (gimple_code (stmt))
3659     {
3660     case GIMPLE_GOTO:
3661       op0 = gimple_goto_dest (stmt);
3662       if (TREE_CODE (op0) == LABEL_DECL)
3663 	expand_goto (op0);
3664       else
3665 	expand_computed_goto (op0);
3666       break;
3667     case GIMPLE_LABEL:
3668       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3669       break;
3670     case GIMPLE_NOP:
3671     case GIMPLE_PREDICT:
3672       break;
3673     case GIMPLE_SWITCH:
3674       {
3675 	gswitch *swtch = as_a <gswitch *> (stmt);
3676 	if (gimple_switch_num_labels (swtch) == 1)
3677 	  expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3678 	else
3679 	  expand_case (swtch);
3680       }
3681       break;
3682     case GIMPLE_ASM:
3683       expand_asm_stmt (as_a <gasm *> (stmt));
3684       break;
3685     case GIMPLE_CALL:
3686       expand_call_stmt (as_a <gcall *> (stmt));
3687       break;
3688 
3689     case GIMPLE_RETURN:
3690       {
3691 	op0 = gimple_return_retval (as_a <greturn *> (stmt));
3692 
3693 	/* If a return doesn't have a location, it very likely represents
3694 	   multiple user returns so we cannot let it inherit the location
3695 	   of the last statement of the previous basic block in RTL.  */
3696 	if (!gimple_has_location (stmt))
3697 	  set_curr_insn_location (cfun->function_end_locus);
3698 
3699 	if (op0 && op0 != error_mark_node)
3700 	  {
3701 	    tree result = DECL_RESULT (current_function_decl);
3702 
3703 	    /* If we are not returning the current function's RESULT_DECL,
3704 	       build an assignment to it.  */
3705 	    if (op0 != result)
3706 	      {
3707 		/* I believe that a function's RESULT_DECL is unique.  */
3708 		gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3709 
3710 		/* ??? We'd like to use simply expand_assignment here,
3711 		   but this fails if the value is of BLKmode but the return
3712 		   decl is a register.  expand_return has special handling
3713 		   for this combination, which eventually should move
3714 		   to common code.  See comments there.  Until then, let's
3715 		   build a modify expression :-/  */
3716 		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3717 			      result, op0);
3718 	      }
3719 	  }
3720 
3721 	if (!op0)
3722 	  expand_null_return ();
3723 	else
3724 	  expand_return (op0);
3725       }
3726       break;
3727 
3728     case GIMPLE_ASSIGN:
3729       {
3730 	gassign *assign_stmt = as_a <gassign *> (stmt);
3731 	tree lhs = gimple_assign_lhs (assign_stmt);
3732 
3733 	/* Tree expand used to fiddle with |= and &= of two bitfield
3734 	   COMPONENT_REFs here.  This can't happen with gimple, the LHS
3735 	   of binary assigns must be a gimple reg.  */
3736 
3737 	if (TREE_CODE (lhs) != SSA_NAME
3738 	    || get_gimple_rhs_class (gimple_expr_code (stmt))
3739 	       == GIMPLE_SINGLE_RHS)
3740 	  {
3741 	    tree rhs = gimple_assign_rhs1 (assign_stmt);
3742 	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3743 			== GIMPLE_SINGLE_RHS);
3744 	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3745 		/* Do not put locations on possibly shared trees.  */
3746 		&& !is_gimple_min_invariant (rhs))
3747 	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3748 	    if (TREE_CLOBBER_P (rhs))
3749 	      /* This is a clobber to mark the going out of scope for
3750 		 this LHS.  */
3751 	      expand_clobber (lhs);
3752 	    else
3753 	      expand_assignment (lhs, rhs,
3754 				 gimple_assign_nontemporal_move_p (
3755 				   assign_stmt));
3756 	  }
3757 	else
3758 	  {
3759 	    rtx target, temp;
3760 	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3761 	    struct separate_ops ops;
3762 	    bool promoted = false;
3763 
3764 	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3765 	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3766 	      promoted = true;
3767 
3768 	    ops.code = gimple_assign_rhs_code (assign_stmt);
3769 	    ops.type = TREE_TYPE (lhs);
3770 	    switch (get_gimple_rhs_class (ops.code))
3771 	      {
3772 		case GIMPLE_TERNARY_RHS:
3773 		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
3774 		  /* Fallthru */
3775 		case GIMPLE_BINARY_RHS:
3776 		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
3777 		  /* Fallthru */
3778 		case GIMPLE_UNARY_RHS:
3779 		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
3780 		  break;
3781 		default:
3782 		  gcc_unreachable ();
3783 	      }
3784 	    ops.location = gimple_location (stmt);
3785 
3786 	    /* If we want to use a nontemporal store, force the value to
3787 	       register first.  If we store into a promoted register,
3788 	       don't directly expand to target.  */
3789 	    temp = nontemporal || promoted ? NULL_RTX : target;
3790 	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3791 				       EXPAND_NORMAL);
3792 
3793 	    if (temp == target)
3794 	      ;
3795 	    else if (promoted)
3796 	      {
3797 		int unsignedp = SUBREG_PROMOTED_SIGN (target);
3798 		/* If TEMP is a VOIDmode constant, use convert_modes to make
3799 		   sure that we properly convert it.  */
3800 		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3801 		  {
3802 		    temp = convert_modes (GET_MODE (target),
3803 					  TYPE_MODE (ops.type),
3804 					  temp, unsignedp);
3805 		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3806 					  GET_MODE (target), temp, unsignedp);
3807 		  }
3808 
3809 		convert_move (SUBREG_REG (target), temp, unsignedp);
3810 	      }
3811 	    else if (nontemporal && emit_storent_insn (target, temp))
3812 	      ;
3813 	    else
3814 	      {
3815 		temp = force_operand (temp, target);
3816 		if (temp != target)
3817 		  emit_move_insn (target, temp);
3818 	      }
3819 	  }
3820       }
3821       break;
3822 
3823     default:
3824       gcc_unreachable ();
3825     }
3826 }
3827 
3828 /* Expand one gimple statement STMT and return the last RTL instruction
3829    before any of the newly generated ones.
3830 
3831    In addition to generating the necessary RTL instructions this also
3832    sets REG_EH_REGION notes if necessary and sets the current source
3833    location for diagnostics.  */
3834 
3835 static rtx_insn *
3836 expand_gimple_stmt (gimple *stmt)
3837 {
3838   location_t saved_location = input_location;
3839   rtx_insn *last = get_last_insn ();
3840   int lp_nr;
3841 
3842   gcc_assert (cfun);
3843 
3844   /* We need to save and restore the current source location so that errors
3845      discovered during expansion are emitted with the right location.  But
3846      it would be better if the diagnostic routines used the source location
3847      embedded in the tree nodes rather than globals.  */
3848   if (gimple_has_location (stmt))
3849     input_location = gimple_location (stmt);
3850 
3851   expand_gimple_stmt_1 (stmt);
3852 
3853   /* Free any temporaries used to evaluate this statement.  */
3854   free_temp_slots ();
3855 
3856   input_location = saved_location;
3857 
3858   /* Mark all insns that may trap.  */
3859   lp_nr = lookup_stmt_eh_lp (stmt);
3860   if (lp_nr)
3861     {
3862       rtx_insn *insn;
3863       for (insn = next_real_insn (last); insn;
3864 	   insn = next_real_insn (insn))
3865 	{
3866 	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3867 	      /* If we want exceptions for non-call insns, any
3868 		 may_trap_p instruction may throw.  */
3869 	      && GET_CODE (PATTERN (insn)) != CLOBBER
3870 	      && GET_CODE (PATTERN (insn)) != USE
3871 	      && insn_could_throw_p (insn))
3872 	    make_reg_eh_region_note (insn, 0, lp_nr);
3873 	}
3874     }
3875 
3876   return last;
3877 }
3878 
3879 /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
3880    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
3881    generated a tail call (something that might be denied by the ABI
3882    rules governing the call; see calls.c).
3883 
3884    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3885    can still reach the rest of BB.  The case here is __builtin_sqrt,
3886    where the NaN result goes through the external function (with a
3887    tailcall) and the normal result happens via a sqrt instruction.  */
3888 
3889 static basic_block
3890 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3891 {
3892   rtx_insn *last2, *last;
3893   edge e;
3894   edge_iterator ei;
3895   profile_probability probability;
3896 
3897   last2 = last = expand_gimple_stmt (stmt);
3898 
3899   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3900     if (CALL_P (last) && SIBLING_CALL_P (last))
3901       goto found;
3902 
3903   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3904 
3905   *can_fallthru = true;
3906   return NULL;
3907 
3908  found:
3909   /* ??? Wouldn't it be better to just reset any pending stack adjust?
3910      Any instructions emitted here are about to be deleted.  */
3911   do_pending_stack_adjust ();
3912 
3913   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
3914   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
3915      EH or abnormal edges, we shouldn't have created a tail call in
3916      the first place.  So it seems to me we should just be removing
3917      all edges here, or redirecting the existing fallthru edge to
3918      the exit block.  */
3919 
3920   probability = profile_probability::never ();
3921 
3922   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3923     {
3924       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3925 	{
3926 	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3927 	    e->dest->count -= e->count ();
3928 	  probability += e->probability;
3929 	  remove_edge (e);
3930 	}
3931       else
3932 	ei_next (&ei);
3933     }
3934 
3935   /* This is somewhat ugly: the call_expr expander often emits instructions
3936      after the sibcall (to perform the function return).  These confuse the
3937      find_many_sub_basic_blocks code, so we need to get rid of them.  */
3938   last = NEXT_INSN (last);
3939   gcc_assert (BARRIER_P (last));
3940 
3941   *can_fallthru = false;
3942   while (NEXT_INSN (last))
3943     {
3944       /* For instance, a sqrt builtin expander expands an if with a
3945 	 sibcall in the then arm and a label for the else arm.  */
3946       if (LABEL_P (NEXT_INSN (last)))
3947 	{
3948 	  *can_fallthru = true;
3949 	  break;
3950 	}
3951       delete_insn (NEXT_INSN (last));
3952     }
3953 
3954   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3955 		 | EDGE_SIBCALL);
3956   e->probability = probability;
3957   BB_END (bb) = last;
3958   update_bb_for_insn (bb);
3959 
3960   if (NEXT_INSN (last))
3961     {
3962       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3963 
3964       last = BB_END (bb);
3965       if (BARRIER_P (last))
3966 	BB_END (bb) = PREV_INSN (last);
3967     }
3968 
3969   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3970 
3971   return bb;
3972 }
3973 
3974 /* Return the difference between the floor and the truncated result of
3975    a signed division by OP1 with remainder MOD.  */
3976 static rtx
3977 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3978 {
3979   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
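  /* For example, -7 / 2 truncates to -3 with remainder -1, while the
     floor is -4, so the adjustment is -1: MOD is nonzero and
     OP1 / MOD = 2 / -1 is negative.  */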
3980   return gen_rtx_IF_THEN_ELSE
3981     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3982      gen_rtx_IF_THEN_ELSE
3983      (mode, gen_rtx_LT (BImode,
3984 			gen_rtx_DIV (mode, op1, mod),
3985 			const0_rtx),
3986       constm1_rtx, const0_rtx),
3987      const0_rtx);
3988 }
3989 
3990 /* Return the difference between the ceil and the truncated result of
3991    a signed division by OP1 with remainder MOD.  */
3992 static rtx
3993 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3994 {
3995   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
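  /* For example, 7 / 2 truncates to 3 with remainder 1, while the
     ceiling is 4, so the adjustment is 1: MOD is nonzero and
     OP1 / MOD = 2 / 1 is positive.  */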
3996   return gen_rtx_IF_THEN_ELSE
3997     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3998      gen_rtx_IF_THEN_ELSE
3999      (mode, gen_rtx_GT (BImode,
4000 			gen_rtx_DIV (mode, op1, mod),
4001 			const0_rtx),
4002       const1_rtx, const0_rtx),
4003      const0_rtx);
4004 }
4005 
4006 /* Return the difference between the ceil and the truncated result of
4007    an unsigned division by OP1 with remainder MOD.  */
4008 static rtx
4009 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4010 {
4011   /* (mod != 0 ? 1 : 0) */
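  /* For example, 7u / 2u truncates to 3 with remainder 1; the ceiling
     is 4, so the adjustment is 1 whenever the remainder is nonzero.  */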
4012   return gen_rtx_IF_THEN_ELSE
4013     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4014      const1_rtx, const0_rtx);
4015 }
4016 
4017 /* Return the difference between the rounded and the truncated result
4018    of a signed division by OP1 with remainder MOD.  Halfway cases are
4019    rounded away from zero, rather than to the nearest even number.  */
4020 static rtx
4021 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4022 {
4023   /* (abs (mod) >= abs (op1) - abs (mod)
4024       ? (op1 / mod > 0 ? 1 : -1)
4025       : 0) */
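  /* For example, 7 / 2 truncates to 3 with remainder 1; rounding 3.5
     away from zero gives 4, so the adjustment is 1 because
     abs (1) >= abs (2) - abs (1).  Likewise -7 / 2 truncates to -3
     with remainder -1 and rounds to -4, giving an adjustment of -1.  */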
4026   return gen_rtx_IF_THEN_ELSE
4027     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4028 		       gen_rtx_MINUS (mode,
4029 				      gen_rtx_ABS (mode, op1),
4030 				      gen_rtx_ABS (mode, mod))),
4031      gen_rtx_IF_THEN_ELSE
4032      (mode, gen_rtx_GT (BImode,
4033 			gen_rtx_DIV (mode, op1, mod),
4034 			const0_rtx),
4035       const1_rtx, constm1_rtx),
4036      const0_rtx);
4037 }
4038 
4039 /* Return the difference between the rounded and the truncated result
4040    of an unsigned division by OP1 with remainder MOD.  Halfway cases
4041    are rounded away from zero, rather than to the nearest even
4042    number.  */
4043 static rtx
4044 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4045 {
4046   /* (mod >= op1 - mod ? 1 : 0) */
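  /* For example, 7u / 2u truncates to 3 with remainder 1; since
     1 >= 2 - 1, the result rounds up to 4 and the adjustment is 1.  */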
4047   return gen_rtx_IF_THEN_ELSE
4048     (mode, gen_rtx_GE (BImode, mod,
4049 		       gen_rtx_MINUS (mode, op1, mod)),
4050      const1_rtx, const0_rtx);
4051 }
4052 
4053 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4054    any rtl.  */
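/* For instance, a SYMBOL_REF is shallow-copied with its mode changed,
   and a narrower address may be wrapped in a ZERO_EXTEND or SIGN_EXTEND
   rtx, rather than emitting any conversion instructions.  */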
4055 
4056 static rtx
4057 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4058 			      addr_space_t as)
4059 {
4060 #ifndef POINTERS_EXTEND_UNSIGNED
4061   gcc_assert (mode == Pmode
4062 	      || mode == targetm.addr_space.address_mode (as));
4063   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4064 #else
4065   rtx temp;
4066 
4067   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4068 
4069   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4070     return x;
4071 
4072   /* X must have some form of address mode already.  */
4073   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4074   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4075     x = lowpart_subreg (mode, x, xmode);
4076   else if (POINTERS_EXTEND_UNSIGNED > 0)
4077     x = gen_rtx_ZERO_EXTEND (mode, x);
4078   else if (!POINTERS_EXTEND_UNSIGNED)
4079     x = gen_rtx_SIGN_EXTEND (mode, x);
4080   else
4081     {
4082       switch (GET_CODE (x))
4083 	{
4084 	case SUBREG:
4085 	  if ((SUBREG_PROMOTED_VAR_P (x)
4086 	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4087 	       || (GET_CODE (SUBREG_REG (x)) == PLUS
4088 		   && REG_P (XEXP (SUBREG_REG (x), 0))
4089 		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4090 		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4091 	      && GET_MODE (SUBREG_REG (x)) == mode)
4092 	    return SUBREG_REG (x);
4093 	  break;
4094 	case LABEL_REF:
4095 	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4096 	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4097 	  return temp;
4098 	case SYMBOL_REF:
4099 	  temp = shallow_copy_rtx (x);
4100 	  PUT_MODE (temp, mode);
4101 	  return temp;
4102 	case CONST:
4103 	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4104 	  if (temp)
4105 	    temp = gen_rtx_CONST (mode, temp);
4106 	  return temp;
4107 	case PLUS:
4108 	case MINUS:
4109 	  if (CONST_INT_P (XEXP (x, 1)))
4110 	    {
4111 	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4112 	      if (temp)
4113 		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4114 	    }
4115 	  break;
4116 	default:
4117 	  break;
4118 	}
4119       /* Don't know how to express ptr_extend as operation in debug info.  */
4120       return NULL;
4121     }
4122 #endif /* POINTERS_EXTEND_UNSIGNED */
4123 
4124   return x;
4125 }
4126 
4127 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4128    by avoid_deep_ter_for_debug.  */
4129 
4130 static hash_map<tree, tree> *deep_ter_debug_map;
4131 
4132 /* Split too deep TER chains for debug stmts using debug temporaries.  */
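/* Illustrative case: a chain of single-use SSA names such as
   x_1 = a + b; x_2 = x_1 * c; ... that TER would substitute into one
   big tree is cut once it grows deeper than the limit below, by
   binding a DEBUG_EXPR_DECL to the intermediate SSA name with a debug
   bind statement.  */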
4133 
4134 static void
4135 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4136 {
4137   use_operand_p use_p;
4138   ssa_op_iter iter;
4139   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4140     {
4141       tree use = USE_FROM_PTR (use_p);
4142       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4143 	continue;
4144       gimple *g = get_gimple_for_ssa_name (use);
4145       if (g == NULL)
4146 	continue;
4147       if (depth > 6 && !stmt_ends_bb_p (g))
4148 	{
4149 	  if (deep_ter_debug_map == NULL)
4150 	    deep_ter_debug_map = new hash_map<tree, tree>;
4151 
4152 	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4153 	  if (vexpr != NULL)
4154 	    continue;
4155 	  vexpr = make_node (DEBUG_EXPR_DECL);
4156 	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4157 	  DECL_ARTIFICIAL (vexpr) = 1;
4158 	  TREE_TYPE (vexpr) = TREE_TYPE (use);
4159 	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4160 	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
4161 	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4162 	  avoid_deep_ter_for_debug (def_temp, 0);
4163 	}
4164       else
4165 	avoid_deep_ter_for_debug (g, depth + 1);
4166     }
4167 }
4168 
4169 /* Return an RTX equivalent to the value of the parameter DECL.  */
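/* For a parameter that arrived in a hard register (or in memory whose
   address is a hard register), this yields an ENTRY_VALUE rtx wrapping
   the incoming location, which debug info generation can later express
   as a DWARF entry-value operation.  */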
4170 
4171 static rtx
4172 expand_debug_parm_decl (tree decl)
4173 {
4174   rtx incoming = DECL_INCOMING_RTL (decl);
4175 
4176   if (incoming
4177       && GET_MODE (incoming) != BLKmode
4178       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4179 	  || (MEM_P (incoming)
4180 	      && REG_P (XEXP (incoming, 0))
4181 	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
4182     {
4183       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4184 
4185 #ifdef HAVE_window_save
4186       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4187 	 If the target machine has an explicit window save instruction, the
4188 	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
4189       if (REG_P (incoming)
4190 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4191 	incoming
4192 	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4193 				OUTGOING_REGNO (REGNO (incoming)), 0);
4194       else if (MEM_P (incoming))
4195 	{
4196 	  rtx reg = XEXP (incoming, 0);
4197 	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4198 	    {
4199 	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4200 	      incoming = replace_equiv_address_nv (incoming, reg);
4201 	    }
4202 	  else
4203 	    incoming = copy_rtx (incoming);
4204 	}
4205 #endif
4206 
4207       ENTRY_VALUE_EXP (rtl) = incoming;
4208       return rtl;
4209     }
4210 
4211   if (incoming
4212       && GET_MODE (incoming) != BLKmode
4213       && !TREE_ADDRESSABLE (decl)
4214       && MEM_P (incoming)
4215       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4216 	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
4217 	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4218 	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4219     return copy_rtx (incoming);
4220 
4221   return NULL_RTX;
4222 }
4223 
4224 /* Return an RTX equivalent to the value of the tree expression EXP.  */
4225 
4226 static rtx
4227 expand_debug_expr (tree exp)
4228 {
4229   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4230   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4231   machine_mode inner_mode = VOIDmode;
4232   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4233   addr_space_t as;
4234   scalar_int_mode op0_mode, op1_mode, addr_mode;
4235 
4236   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4237     {
4238     case tcc_expression:
4239       switch (TREE_CODE (exp))
4240 	{
4241 	case COND_EXPR:
4242 	case DOT_PROD_EXPR:
4243 	case SAD_EXPR:
4244 	case WIDEN_MULT_PLUS_EXPR:
4245 	case WIDEN_MULT_MINUS_EXPR:
4246 	  goto ternary;
4247 
4248 	case TRUTH_ANDIF_EXPR:
4249 	case TRUTH_ORIF_EXPR:
4250 	case TRUTH_AND_EXPR:
4251 	case TRUTH_OR_EXPR:
4252 	case TRUTH_XOR_EXPR:
4253 	  goto binary;
4254 
4255 	case TRUTH_NOT_EXPR:
4256 	  goto unary;
4257 
4258 	default:
4259 	  break;
4260 	}
4261       break;
4262 
4263     ternary:
4264       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4265       if (!op2)
4266 	return NULL_RTX;
4267       /* Fall through.  */
4268 
4269     binary:
4270     case tcc_binary:
4271       if (mode == BLKmode)
4272 	return NULL_RTX;
4273       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4274       if (!op1)
4275 	return NULL_RTX;
4276       switch (TREE_CODE (exp))
4277 	{
4278 	case LSHIFT_EXPR:
4279 	case RSHIFT_EXPR:
4280 	case LROTATE_EXPR:
4281 	case RROTATE_EXPR:
4282 	case WIDEN_LSHIFT_EXPR:
4283 	  /* Ensure second operand isn't wider than the first one.  */
4284 	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4285 	  if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4286 	      && (GET_MODE_UNIT_PRECISION (mode)
4287 		  < GET_MODE_PRECISION (op1_mode)))
4288 	    op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4289 	  break;
4290 	default:
4291 	  break;
4292 	}
4293       /* Fall through.  */
4294 
4295     unary:
4296     case tcc_unary:
4297       if (mode == BLKmode)
4298 	return NULL_RTX;
4299       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4300       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4301       if (!op0)
4302 	return NULL_RTX;
4303       break;
4304 
4305     case tcc_comparison:
4306       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4307       goto binary;
4308 
4309     case tcc_type:
4310     case tcc_statement:
4311       gcc_unreachable ();
4312 
4313     case tcc_constant:
4314     case tcc_exceptional:
4315     case tcc_declaration:
4316     case tcc_reference:
4317     case tcc_vl_exp:
4318       break;
4319     }
4320 
4321   switch (TREE_CODE (exp))
4322     {
4323     case STRING_CST:
4324       if (!lookup_constant_def (exp))
4325 	{
4326 	  if (strlen (TREE_STRING_POINTER (exp)) + 1
4327 	      != (size_t) TREE_STRING_LENGTH (exp))
4328 	    return NULL_RTX;
4329 	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4330 	  op0 = gen_rtx_MEM (BLKmode, op0);
4331 	  set_mem_attributes (op0, exp, 0);
4332 	  return op0;
4333 	}
4334       /* Fall through.  */
4335 
4336     case INTEGER_CST:
4337     case REAL_CST:
4338     case FIXED_CST:
4339       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4340       return op0;
4341 
4342     case POLY_INT_CST:
4343       return immed_wide_int_const (poly_int_cst_value (exp), mode);
4344 
4345     case COMPLEX_CST:
4346       gcc_assert (COMPLEX_MODE_P (mode));
4347       op0 = expand_debug_expr (TREE_REALPART (exp));
4348       op1 = expand_debug_expr (TREE_IMAGPART (exp));
4349       return gen_rtx_CONCAT (mode, op0, op1);
4350 
4351     case DEBUG_EXPR_DECL:
4352       op0 = DECL_RTL_IF_SET (exp);
4353 
4354       if (op0)
4355 	return op0;
4356 
4357       op0 = gen_rtx_DEBUG_EXPR (mode);
4358       DEBUG_EXPR_TREE_DECL (op0) = exp;
4359       SET_DECL_RTL (exp, op0);
4360 
4361       return op0;
4362 
4363     case VAR_DECL:
4364     case PARM_DECL:
4365     case FUNCTION_DECL:
4366     case LABEL_DECL:
4367     case CONST_DECL:
4368     case RESULT_DECL:
4369       op0 = DECL_RTL_IF_SET (exp);
4370 
4371       /* This decl was probably optimized away.  */
4372       if (!op0
4373 	  /* At least label RTXen are sometimes replaced by
4374 	     NOTE_INSN_DELETED_LABEL.  Any notes here are not
4375 	     handled by copy_rtx.  */
4376 	  || NOTE_P (op0))
4377 	{
4378 	  if (!VAR_P (exp)
4379 	      || DECL_EXTERNAL (exp)
4380 	      || !TREE_STATIC (exp)
4381 	      || !DECL_NAME (exp)
4382 	      || DECL_HARD_REGISTER (exp)
4383 	      || DECL_IN_CONSTANT_POOL (exp)
4384 	      || mode == VOIDmode)
4385 	    return NULL;
4386 
4387 	  op0 = make_decl_rtl_for_debug (exp);
4388 	  if (!MEM_P (op0)
4389 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4390 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4391 	    return NULL;
4392 	}
4393       else
4394 	op0 = copy_rtx (op0);
4395 
4396       if (GET_MODE (op0) == BLKmode
4397 	  /* If op0 is not BLKmode, but mode is, adjust_mode
4398 	     below would ICE.  While it is likely a FE bug,
4399 	     try to be robust here.  See PR43166.  */
4400 	  || mode == BLKmode
4401 	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4402 	{
4403 	  gcc_assert (MEM_P (op0));
4404 	  op0 = adjust_address_nv (op0, mode, 0);
4405 	  return op0;
4406 	}
4407 
4408       /* Fall through.  */
4409 
4410     adjust_mode:
4411     case PAREN_EXPR:
4412     CASE_CONVERT:
4413       {
4414 	inner_mode = GET_MODE (op0);
4415 
4416 	if (mode == inner_mode)
4417 	  return op0;
4418 
4419 	if (inner_mode == VOIDmode)
4420 	  {
4421 	    if (TREE_CODE (exp) == SSA_NAME)
4422 	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
4423 	    else
4424 	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4425 	    if (mode == inner_mode)
4426 	      return op0;
4427 	  }
4428 
4429 	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4430 	  {
4431 	    if (GET_MODE_UNIT_BITSIZE (mode)
4432 		== GET_MODE_UNIT_BITSIZE (inner_mode))
4433 	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4434 	    else if (GET_MODE_UNIT_BITSIZE (mode)
4435 		     < GET_MODE_UNIT_BITSIZE (inner_mode))
4436 	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4437 	    else
4438 	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4439 	  }
4440 	else if (FLOAT_MODE_P (mode))
4441 	  {
4442 	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
4443 	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4444 	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4445 	    else
4446 	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4447 	  }
4448 	else if (FLOAT_MODE_P (inner_mode))
4449 	  {
4450 	    if (unsignedp)
4451 	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4452 	    else
4453 	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4454 	  }
4455 	else if (GET_MODE_UNIT_PRECISION (mode)
4456 		 == GET_MODE_UNIT_PRECISION (inner_mode))
4457 	  op0 = lowpart_subreg (mode, op0, inner_mode);
4458 	else if (GET_MODE_UNIT_PRECISION (mode)
4459 		 < GET_MODE_UNIT_PRECISION (inner_mode))
4460 	  op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4461 	else if (UNARY_CLASS_P (exp)
4462 		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4463 		 : unsignedp)
4464 	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4465 	else
4466 	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4467 
4468 	return op0;
4469       }
4470 
4471     case MEM_REF:
4472       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4473 	{
4474 	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4475 				     TREE_OPERAND (exp, 0),
4476 				     TREE_OPERAND (exp, 1));
4477 	  if (newexp)
4478 	    return expand_debug_expr (newexp);
4479 	}
4480       /* FALLTHROUGH */
4481     case INDIRECT_REF:
4482       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4483       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4484       if (!op0)
4485 	return NULL;
4486 
4487       if (TREE_CODE (exp) == MEM_REF)
4488 	{
4489 	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4490 	      || (GET_CODE (op0) == PLUS
4491 		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4492 	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
4493 	       Instead just use get_inner_reference.  */
4494 	    goto component_ref;
4495 
4496 	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4497 	  poly_int64 offset;
4498 	  if (!op1 || !poly_int_rtx_p (op1, &offset))
4499 	    return NULL;
4500 
4501 	  op0 = plus_constant (inner_mode, op0, offset);
4502 	}
4503 
4504       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4505 
4506       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4507 					  op0, as);
4508       if (op0 == NULL_RTX)
4509 	return NULL;
4510 
4511       op0 = gen_rtx_MEM (mode, op0);
4512       set_mem_attributes (op0, exp, 0);
4513       if (TREE_CODE (exp) == MEM_REF
4514 	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4515 	set_mem_expr (op0, NULL_TREE);
4516       set_mem_addr_space (op0, as);
4517 
4518       return op0;
4519 
4520     case TARGET_MEM_REF:
4521       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4522 	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4523 	return NULL;
4524 
4525       op0 = expand_debug_expr
4526 	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4527       if (!op0)
4528 	return NULL;
4529 
4530       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4531       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4532 					  op0, as);
4533       if (op0 == NULL_RTX)
4534 	return NULL;
4535 
4536       op0 = gen_rtx_MEM (mode, op0);
4537 
4538       set_mem_attributes (op0, exp, 0);
4539       set_mem_addr_space (op0, as);
4540 
4541       return op0;
4542 
4543     component_ref:
4544     case ARRAY_REF:
4545     case ARRAY_RANGE_REF:
4546     case COMPONENT_REF:
4547     case BIT_FIELD_REF:
4548     case REALPART_EXPR:
4549     case IMAGPART_EXPR:
4550     case VIEW_CONVERT_EXPR:
4551       {
4552 	machine_mode mode1;
4553 	poly_int64 bitsize, bitpos;
4554 	tree offset;
4555 	int reversep, volatilep = 0;
4556 	tree tem
4557 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4558 				 &unsignedp, &reversep, &volatilep);
4559 	rtx orig_op0;
4560 
4561 	if (known_eq (bitsize, 0))
4562 	  return NULL;
4563 
4564 	orig_op0 = op0 = expand_debug_expr (tem);
4565 
4566 	if (!op0)
4567 	  return NULL;
4568 
4569 	if (offset)
4570 	  {
4571 	    machine_mode addrmode, offmode;
4572 
4573 	    if (!MEM_P (op0))
4574 	      return NULL;
4575 
4576 	    op0 = XEXP (op0, 0);
4577 	    addrmode = GET_MODE (op0);
4578 	    if (addrmode == VOIDmode)
4579 	      addrmode = Pmode;
4580 
4581 	    op1 = expand_debug_expr (offset);
4582 	    if (!op1)
4583 	      return NULL;
4584 
4585 	    offmode = GET_MODE (op1);
4586 	    if (offmode == VOIDmode)
4587 	      offmode = TYPE_MODE (TREE_TYPE (offset));
4588 
4589 	    if (addrmode != offmode)
4590 	      op1 = lowpart_subreg (addrmode, op1, offmode);
4591 
4592 	    /* Don't use offset_address here, we don't need a
4593 	       recognizable address, and we don't want to generate
4594 	       code.  */
4595 	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4596 							  op0, op1));
4597 	  }
4598 
4599 	if (MEM_P (op0))
4600 	  {
4601 	    if (mode1 == VOIDmode)
4602 	      {
4603 		if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4604 		  return NULL;
4605 		/* Bitfield.  */
4606 		mode1 = smallest_int_mode_for_size (bitsize);
4607 	      }
4608 	    poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4609 	    if (maybe_ne (bytepos, 0))
4610 	      {
4611 		op0 = adjust_address_nv (op0, mode1, bytepos);
4612 		bitpos = num_trailing_bits (bitpos);
4613 	      }
4614 	    else if (known_eq (bitpos, 0)
4615 		     && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4616 	      op0 = adjust_address_nv (op0, mode, 0);
4617 	    else if (GET_MODE (op0) != mode1)
4618 	      op0 = adjust_address_nv (op0, mode1, 0);
4619 	    else
4620 	      op0 = copy_rtx (op0);
4621 	    if (op0 == orig_op0)
4622 	      op0 = shallow_copy_rtx (op0);
4623 	    if (TREE_CODE (tem) != SSA_NAME)
4624 	      set_mem_attributes (op0, exp, 0);
4625 	  }
4626 
4627 	if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4628 	  return op0;
4629 
4630 	if (maybe_lt (bitpos, 0))
4631           return NULL;
4632 
4633 	if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4634 	  return NULL;
4635 
4636 	poly_int64 bytepos;
4637 	if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4638 	    && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4639 	  {
4640 	    machine_mode opmode = GET_MODE (op0);
4641 
4642 	    if (opmode == VOIDmode)
4643 	      opmode = TYPE_MODE (TREE_TYPE (tem));
4644 
4645 	    /* This condition may hold if we're expanding the address
4646 	       right past the end of an array that turned out not to
4647 	       be addressable (i.e., the address was only computed in
4648 	       debug stmts).  The gen_subreg below would rightfully
4649 	       crash, and the address doesn't really exist, so just
4650 	       drop it.  */
4651 	    if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4652 	      return NULL;
4653 
4654 	    if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4655 	      return simplify_gen_subreg (mode, op0, opmode, bytepos);
4656 	  }
4657 
4658 	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4659 				     && TYPE_UNSIGNED (TREE_TYPE (exp))
4660 				     ? SIGN_EXTRACT
4661 				     : ZERO_EXTRACT, mode,
4662 				     GET_MODE (op0) != VOIDmode
4663 				     ? GET_MODE (op0)
4664 				     : TYPE_MODE (TREE_TYPE (tem)),
4665 				     op0, gen_int_mode (bitsize, word_mode),
4666 				     gen_int_mode (bitpos, word_mode));
4667       }
4668 
4669     case ABS_EXPR:
4670     case ABSU_EXPR:
4671       return simplify_gen_unary (ABS, mode, op0, mode);
4672 
4673     case NEGATE_EXPR:
4674       return simplify_gen_unary (NEG, mode, op0, mode);
4675 
4676     case BIT_NOT_EXPR:
4677       return simplify_gen_unary (NOT, mode, op0, mode);
4678 
4679     case FLOAT_EXPR:
4680       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4681 									 0)))
4682 				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4683 				 inner_mode);
4684 
4685     case FIX_TRUNC_EXPR:
4686       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4687 				 inner_mode);
4688 
4689     case POINTER_PLUS_EXPR:
4690       /* For the rare target where pointers are not the same size as
4691 	 size_t, we need to check for mis-matched modes and correct
4692 	 the addend.  */
4693       if (op0 && op1
4694 	  && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4695 	  && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4696 	  && op0_mode != op1_mode)
4697 	{
4698 	  if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4699 	      /* If OP0 is a partial mode, then we must truncate, even
4700 		 if it has the same bitsize as OP1, because GCC's
4701 		 representation of partial modes is opaque.  */
4702 	      || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4703 		  && (GET_MODE_BITSIZE (op0_mode)
4704 		      == GET_MODE_BITSIZE (op1_mode))))
4705 	    op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4706 	  else
4707 	    /* We always sign-extend, regardless of the signedness of
4708 	       the operand, because the operand is always unsigned
4709 	       here even if the original C expression is signed.  */
4710 	    op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4711 	}
4712       /* Fall through.  */
4713     case PLUS_EXPR:
4714       return simplify_gen_binary (PLUS, mode, op0, op1);
4715 
4716     case MINUS_EXPR:
4717     case POINTER_DIFF_EXPR:
4718       return simplify_gen_binary (MINUS, mode, op0, op1);
4719 
4720     case MULT_EXPR:
4721       return simplify_gen_binary (MULT, mode, op0, op1);
4722 
4723     case RDIV_EXPR:
4724     case TRUNC_DIV_EXPR:
4725     case EXACT_DIV_EXPR:
4726       if (unsignedp)
4727 	return simplify_gen_binary (UDIV, mode, op0, op1);
4728       else
4729 	return simplify_gen_binary (DIV, mode, op0, op1);
4730 
4731     case TRUNC_MOD_EXPR:
4732       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4733 
4734     case FLOOR_DIV_EXPR:
4735       if (unsignedp)
4736 	return simplify_gen_binary (UDIV, mode, op0, op1);
4737       else
4738 	{
4739 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4740 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4741 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4742 	  return simplify_gen_binary (PLUS, mode, div, adj);
4743 	}
4744 
4745     case FLOOR_MOD_EXPR:
4746       if (unsignedp)
4747 	return simplify_gen_binary (UMOD, mode, op0, op1);
4748       else
4749 	{
4750 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4751 	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
4752 	  adj = simplify_gen_unary (NEG, mode,
4753 				    simplify_gen_binary (MULT, mode, adj, op1),
4754 				    mode);
4755 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4756 	}
4757 
4758     case CEIL_DIV_EXPR:
4759       if (unsignedp)
4760 	{
4761 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4762 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4763 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4764 	  return simplify_gen_binary (PLUS, mode, div, adj);
4765 	}
4766       else
4767 	{
4768 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4769 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4770 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4771 	  return simplify_gen_binary (PLUS, mode, div, adj);
4772 	}
4773 
4774     case CEIL_MOD_EXPR:
4775       if (unsignedp)
4776 	{
4777 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4778 	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
4779 	  adj = simplify_gen_unary (NEG, mode,
4780 				    simplify_gen_binary (MULT, mode, adj, op1),
4781 				    mode);
4782 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4783 	}
4784       else
4785 	{
4786 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4787 	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4788 	  adj = simplify_gen_unary (NEG, mode,
4789 				    simplify_gen_binary (MULT, mode, adj, op1),
4790 				    mode);
4791 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4792 	}
4793 
4794     case ROUND_DIV_EXPR:
4795       if (unsignedp)
4796 	{
4797 	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4798 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4799 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4800 	  return simplify_gen_binary (PLUS, mode, div, adj);
4801 	}
4802       else
4803 	{
4804 	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4805 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4806 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4807 	  return simplify_gen_binary (PLUS, mode, div, adj);
4808 	}
4809 
4810     case ROUND_MOD_EXPR:
4811       if (unsignedp)
4812 	{
4813 	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4814 	  rtx adj = round_udiv_adjust (mode, mod, op1);
4815 	  adj = simplify_gen_unary (NEG, mode,
4816 				    simplify_gen_binary (MULT, mode, adj, op1),
4817 				    mode);
4818 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4819 	}
4820       else
4821 	{
4822 	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4823 	  rtx adj = round_sdiv_adjust (mode, mod, op1);
4824 	  adj = simplify_gen_unary (NEG, mode,
4825 				    simplify_gen_binary (MULT, mode, adj, op1),
4826 				    mode);
4827 	  return simplify_gen_binary (PLUS, mode, mod, adj);
4828 	}
4829 
4830     case LSHIFT_EXPR:
4831       return simplify_gen_binary (ASHIFT, mode, op0, op1);
4832 
4833     case RSHIFT_EXPR:
4834       if (unsignedp)
4835 	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4836       else
4837 	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4838 
4839     case LROTATE_EXPR:
4840       return simplify_gen_binary (ROTATE, mode, op0, op1);
4841 
4842     case RROTATE_EXPR:
4843       return simplify_gen_binary (ROTATERT, mode, op0, op1);
4844 
4845     case MIN_EXPR:
4846       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4847 
4848     case MAX_EXPR:
4849       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4850 
4851     case BIT_AND_EXPR:
4852     case TRUTH_AND_EXPR:
4853       return simplify_gen_binary (AND, mode, op0, op1);
4854 
4855     case BIT_IOR_EXPR:
4856     case TRUTH_OR_EXPR:
4857       return simplify_gen_binary (IOR, mode, op0, op1);
4858 
4859     case BIT_XOR_EXPR:
4860     case TRUTH_XOR_EXPR:
4861       return simplify_gen_binary (XOR, mode, op0, op1);
4862 
4863     case TRUTH_ANDIF_EXPR:
4864       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4865 
4866     case TRUTH_ORIF_EXPR:
4867       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4868 
4869     case TRUTH_NOT_EXPR:
4870       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4871 
4872     case LT_EXPR:
4873       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4874 				      op0, op1);
4875 
4876     case LE_EXPR:
4877       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4878 				      op0, op1);
4879 
4880     case GT_EXPR:
4881       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4882 				      op0, op1);
4883 
4884     case GE_EXPR:
4885       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4886 				      op0, op1);
4887 
4888     case EQ_EXPR:
4889       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4890 
4891     case NE_EXPR:
4892       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4893 
4894     case UNORDERED_EXPR:
4895       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4896 
4897     case ORDERED_EXPR:
4898       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4899 
4900     case UNLT_EXPR:
4901       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4902 
4903     case UNLE_EXPR:
4904       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4905 
4906     case UNGT_EXPR:
4907       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4908 
4909     case UNGE_EXPR:
4910       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4911 
4912     case UNEQ_EXPR:
4913       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4914 
4915     case LTGT_EXPR:
4916       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4917 
4918     case COND_EXPR:
4919       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4920 
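    /* A complex value is represented as a CONCAT of its real and imaginary
       parts; VOIDmode constant operands are first wrapped in a CONST of the
       component mode.  */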
4921     case COMPLEX_EXPR:
4922       gcc_assert (COMPLEX_MODE_P (mode));
4923       if (GET_MODE (op0) == VOIDmode)
4924 	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4925       if (GET_MODE (op1) == VOIDmode)
4926 	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4927       return gen_rtx_CONCAT (mode, op0, op1);
4928 
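    /* The complex conjugate keeps the real part and negates the imaginary
       part.  If OP0 is not already a CONCAT, extract the two halves either
       directly from memory or through an integer view of the value.  */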
4929     case CONJ_EXPR:
4930       if (GET_CODE (op0) == CONCAT)
4931 	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4932 			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4933 						   XEXP (op0, 1),
4934 						   GET_MODE_INNER (mode)));
4935       else
4936 	{
4937 	  scalar_mode imode = GET_MODE_INNER (mode);
4938 	  rtx re, im;
4939 
4940 	  if (MEM_P (op0))
4941 	    {
4942 	      re = adjust_address_nv (op0, imode, 0);
4943 	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4944 	    }
4945 	  else
4946 	    {
4947 	      scalar_int_mode ifmode;
4948 	      scalar_int_mode ihmode;
4949 	      rtx halfsize;
4950 	      if (!int_mode_for_mode (mode).exists (&ifmode)
4951 		  || !int_mode_for_mode (imode).exists (&ihmode))
4952 		return NULL;
4953 	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4954 	      re = op0;
4955 	      if (mode != ifmode)
4956 		re = gen_rtx_SUBREG (ifmode, re, 0);
4957 	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4958 	      if (imode != ihmode)
4959 		re = gen_rtx_SUBREG (imode, re, 0);
4960 	      im = copy_rtx (op0);
4961 	      if (mode != ifmode)
4962 		im = gen_rtx_SUBREG (ifmode, im, 0);
4963 	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4964 	      if (imode != ihmode)
4965 		im = gen_rtx_SUBREG (imode, im, 0);
4966 	    }
4967 	  im = gen_rtx_NEG (imode, im);
4968 	  return gen_rtx_CONCAT (mode, re, im);
4969 	}
4970 
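    /* If the operand does not expand to a MEM, fall back to a
       DEBUG_IMPLICIT_PTR (possibly plus a constant offset) for suitable
       decls, so the address can still be described in debug info.  */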
4971     case ADDR_EXPR:
4972       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4973       if (!op0 || !MEM_P (op0))
4974 	{
4975 	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4976 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4977 	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4978 	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4979 		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4980 	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4981 
4982 	  if (handled_component_p (TREE_OPERAND (exp, 0)))
4983 	    {
4984 	      poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4985 	      bool reverse;
4986 	      tree decl
4987 		= get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4988 					   &bitsize, &maxsize, &reverse);
4989 	      if ((VAR_P (decl)
4990 		   || TREE_CODE (decl) == PARM_DECL
4991 		   || TREE_CODE (decl) == RESULT_DECL)
4992 		  && (!TREE_ADDRESSABLE (decl)
4993 		      || target_for_debug_bind (decl))
4994 		  && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4995 		  && known_gt (bitsize, 0)
4996 		  && known_eq (bitsize, maxsize))
4997 		{
4998 		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4999 		  return plus_constant (mode, base, byteoffset);
5000 		}
5001 	    }
5002 
5003 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5004 	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5005 		 == ADDR_EXPR)
5006 	    {
5007 	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5008 						     0));
5009 	      if (op0 != NULL
5010 		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5011 		      || (GET_CODE (op0) == PLUS
5012 			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5013 			  && CONST_INT_P (XEXP (op0, 1)))))
5014 		{
5015 		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5016 							 1));
5017 		  poly_int64 offset;
5018 		  if (!op1 || !poly_int_rtx_p (op1, &offset))
5019 		    return NULL;
5020 
5021 		  return plus_constant (mode, op0, offset);
5022 		}
5023 	    }
5024 
5025 	  return NULL;
5026 	}
5027 
5028       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5029       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5030       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5031 
5032       return op0;
5033 
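    /* A constant vector is represented for debug purposes as a CONCATN of
       its expanded elements.  */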
5034     case VECTOR_CST:
5035       {
5036 	unsigned HOST_WIDE_INT i, nelts;
5037 
5038 	if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5039 	  return NULL;
5040 
5041 	op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5042 
5043 	for (i = 0; i < nelts; ++i)
5044 	  {
5045 	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5046 	    if (!op1)
5047 	      return NULL;
5048 	    XVECEXP (op0, 0, i) = op1;
5049 	  }
5050 
5051 	return op0;
5052       }
5053 
5054     case CONSTRUCTOR:
5055       if (TREE_CLOBBER_P (exp))
5056 	return NULL;
5057       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5058 	{
5059 	  unsigned i;
5060 	  unsigned HOST_WIDE_INT nelts;
5061 	  tree val;
5062 
5063 	  if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5064 	    goto flag_unsupported;
5065 
5066 	  op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5067 
5068 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5069 	    {
5070 	      op1 = expand_debug_expr (val);
5071 	      if (!op1)
5072 		return NULL;
5073 	      XVECEXP (op0, 0, i) = op1;
5074 	    }
5075 
5076 	  if (i < nelts)
5077 	    {
5078 	      op1 = expand_debug_expr
5079 		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5080 
5081 	      if (!op1)
5082 		return NULL;
5083 
5084 	      for (; i < nelts; i++)
5085 		XVECEXP (op0, 0, i) = op1;
5086 	    }
5087 
5088 	  return op0;
5089 	}
5090       else
5091 	goto flag_unsupported;
5092 
5093     case CALL_EXPR:
5094       /* ??? Maybe handle some builtins?  */
5095       return NULL;
5096 
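    /* For an SSA name, prefer expanding its single-use (TERed) defining
       statement; otherwise fall back to the RTL of its out-of-SSA partition,
       or to the PARM_DECL's RTL for unused default definitions.  */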
5097     case SSA_NAME:
5098       {
5099 	gimple *g = get_gimple_for_ssa_name (exp);
5100 	if (g)
5101 	  {
5102 	    tree t = NULL_TREE;
5103 	    if (deep_ter_debug_map)
5104 	      {
5105 		tree *slot = deep_ter_debug_map->get (exp);
5106 		if (slot)
5107 		  t = *slot;
5108 	      }
5109 	    if (t == NULL_TREE)
5110 	      t = gimple_assign_rhs_to_tree (g);
5111 	    op0 = expand_debug_expr (t);
5112 	    if (!op0)
5113 	      return NULL;
5114 	  }
5115 	else
5116 	  {
5117 	    /* If this is a reference to the incoming value of a
5118 	       parameter that is never used in the code, or whose
5119 	       incoming value is never used in the code, use the
5120 	       PARM_DECL's DECL_RTL if set.  */
5121 	    if (SSA_NAME_IS_DEFAULT_DEF (exp)
5122 		&& SSA_NAME_VAR (exp)
5123 		&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5124 		&& has_zero_uses (exp))
5125 	      {
5126 		op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5127 		if (op0)
5128 		  goto adjust_mode;
5129 		op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5130 		if (op0)
5131 		  goto adjust_mode;
5132 	      }
5133 
5134 	    int part = var_to_partition (SA.map, exp);
5135 
5136 	    if (part == NO_PARTITION)
5137 	      return NULL;
5138 
5139 	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5140 
5141 	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
5142 	  }
5143 	goto adjust_mode;
5144       }
5145 
5146     case ERROR_MARK:
5147       return NULL;
5148 
5149     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
5150     case REALIGN_LOAD_EXPR:
5151     case VEC_COND_EXPR:
5152     case VEC_PACK_FIX_TRUNC_EXPR:
5153     case VEC_PACK_FLOAT_EXPR:
5154     case VEC_PACK_SAT_EXPR:
5155     case VEC_PACK_TRUNC_EXPR:
5156     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5157     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5158     case VEC_UNPACK_FLOAT_HI_EXPR:
5159     case VEC_UNPACK_FLOAT_LO_EXPR:
5160     case VEC_UNPACK_HI_EXPR:
5161     case VEC_UNPACK_LO_EXPR:
5162     case VEC_WIDEN_MULT_HI_EXPR:
5163     case VEC_WIDEN_MULT_LO_EXPR:
5164     case VEC_WIDEN_MULT_EVEN_EXPR:
5165     case VEC_WIDEN_MULT_ODD_EXPR:
5166     case VEC_WIDEN_LSHIFT_HI_EXPR:
5167     case VEC_WIDEN_LSHIFT_LO_EXPR:
5168     case VEC_PERM_EXPR:
5169     case VEC_DUPLICATE_EXPR:
5170     case VEC_SERIES_EXPR:
5171     case SAD_EXPR:
5172       return NULL;
5173 
5174     /* Misc codes.  */
5175     case ADDR_SPACE_CONVERT_EXPR:
5176     case FIXED_CONVERT_EXPR:
5177     case OBJ_TYPE_REF:
5178     case WITH_SIZE_EXPR:
5179     case BIT_INSERT_EXPR:
5180       return NULL;
5181 
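    /* For a dot-product, widen both multiplication operands to the result
       mode according to their signedness, multiply them, and add the
       accumulator OP2.  */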
5182     case DOT_PROD_EXPR:
5183       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5184 	  && SCALAR_INT_MODE_P (mode))
5185 	{
5186 	  op0
5187 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5188 									  0)))
5189 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5190 				  inner_mode);
5191 	  op1
5192 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5193 									  1)))
5194 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5195 				  inner_mode);
5196 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5197 	  return simplify_gen_binary (PLUS, mode, op0, op2);
5198 	}
5199       return NULL;
5200 
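    /* Widening multiplications extend both operands to the result mode
       before multiplying; the PLUS and MINUS variants then combine the
       product with the accumulator OP2.  */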
5201     case WIDEN_MULT_EXPR:
5202     case WIDEN_MULT_PLUS_EXPR:
5203     case WIDEN_MULT_MINUS_EXPR:
5204       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5205 	  && SCALAR_INT_MODE_P (mode))
5206 	{
5207 	  inner_mode = GET_MODE (op0);
5208 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5209 	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5210 	  else
5211 	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5212 	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5213 	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5214 	  else
5215 	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5216 	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
5217 	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5218 	    return op0;
5219 	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5220 	    return simplify_gen_binary (PLUS, mode, op0, op2);
5221 	  else
5222 	    return simplify_gen_binary (MINUS, mode, op2, op0);
5223 	}
5224       return NULL;
5225 
5226     case MULT_HIGHPART_EXPR:
5227       /* ??? Similar to the above.  */
5228       return NULL;
5229 
5230     case WIDEN_SUM_EXPR:
5231     case WIDEN_LSHIFT_EXPR:
5232       if (SCALAR_INT_MODE_P (GET_MODE (op0))
5233 	  && SCALAR_INT_MODE_P (mode))
5234 	{
5235 	  op0
5236 	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5237 									  0)))
5238 				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5239 				  inner_mode);
5240 	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5241 				      ? ASHIFT : PLUS, mode, op0, op1);
5242 	}
5243       return NULL;
5244 
5245     default:
5246     flag_unsupported:
5247       if (flag_checking)
5248 	{
5249 	  debug_tree (exp);
5250 	  gcc_unreachable ();
5251 	}
5252       return NULL;
5253     }
5254 }
5255 
5256 /* Return an RTX equivalent to the source bind value of the tree expression
5257    EXP.  */
5258 
5259 static rtx
5260 expand_debug_source_expr (tree exp)
5261 {
5262   rtx op0 = NULL_RTX;
5263   machine_mode mode = VOIDmode, inner_mode;
5264 
5265   switch (TREE_CODE (exp))
5266     {
5267     case VAR_DECL:
5268       if (DECL_ABSTRACT_ORIGIN (exp))
5269 	return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5270       break;
5271     case PARM_DECL:
5272       {
5273 	mode = DECL_MODE (exp);
5274 	op0 = expand_debug_parm_decl (exp);
5275 	if (op0)
5276 	   break;
5277 	/* See if this isn't an argument that has been completely
5278 	   optimized out.  */
5279 	if (!DECL_RTL_SET_P (exp)
5280 	    && !DECL_INCOMING_RTL (exp)
5281 	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
5282 	  {
5283 	    tree aexp = DECL_ORIGIN (exp);
5284 	    if (DECL_CONTEXT (aexp)
5285 		== DECL_ABSTRACT_ORIGIN (current_function_decl))
5286 	      {
5287 		vec<tree, va_gc> **debug_args;
5288 		unsigned int ix;
5289 		tree ddecl;
5290 		debug_args = decl_debug_args_lookup (current_function_decl);
5291 		if (debug_args != NULL)
5292 		  {
5293 		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5294 			 ix += 2)
5295 		      if (ddecl == aexp)
5296 			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5297 		  }
5298 	      }
5299 	  }
5300 	break;
5301       }
5302     default:
5303       break;
5304     }
5305 
5306   if (op0 == NULL_RTX)
5307     return NULL_RTX;
5308 
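  /* Convert the value to the mode of the declaration, using the signedness
     of the expression's type for the integer conversions.  */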
5309   inner_mode = GET_MODE (op0);
5310   if (mode == inner_mode)
5311     return op0;
5312 
5313   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5314     {
5315       if (GET_MODE_UNIT_BITSIZE (mode)
5316 	  == GET_MODE_UNIT_BITSIZE (inner_mode))
5317 	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5318       else if (GET_MODE_UNIT_BITSIZE (mode)
5319 	       < GET_MODE_UNIT_BITSIZE (inner_mode))
5320 	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5321       else
5322 	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5323     }
5324   else if (FLOAT_MODE_P (mode))
5325     gcc_unreachable ();
5326   else if (FLOAT_MODE_P (inner_mode))
5327     {
5328       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5329 	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5330       else
5331 	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5332     }
5333   else if (GET_MODE_UNIT_PRECISION (mode)
5334 	   == GET_MODE_UNIT_PRECISION (inner_mode))
5335     op0 = lowpart_subreg (mode, op0, inner_mode);
5336   else if (GET_MODE_UNIT_PRECISION (mode)
5337 	   < GET_MODE_UNIT_PRECISION (inner_mode))
5338     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5339   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5340     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5341   else
5342     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5343 
5344   return op0;
5345 }
5346 
5347 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5348    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5349    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
5350 
5351 static void
5352 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5353 {
5354   rtx exp = *exp_p;
5355 
5356   if (exp == NULL_RTX)
5357     return;
5358 
5359   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5360     return;
5361 
5362   if (depth == 4)
5363     {
5364       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
5365       rtx dval = make_debug_expr_from_rtl (exp);
5366 
5367       /* Emit a debug bind insn before INSN.  */
5368       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5369 				       DEBUG_EXPR_TREE_DECL (dval), exp,
5370 				       VAR_INIT_STATUS_INITIALIZED);
5371 
5372       emit_debug_insn_before (bind, insn);
5373       *exp_p = dval;
5374       return;
5375     }
5376 
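  /* Otherwise recurse into every rtx operand ('e') and rtx vector
     ('E'/'V'), counting one more level of nesting.  */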
5377   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5378   int i, j;
5379   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5380     switch (*format_ptr++)
5381       {
5382       case 'e':
5383 	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5384 	break;
5385 
5386       case 'E':
5387       case 'V':
5388 	for (j = 0; j < XVECLEN (exp, i); j++)
5389 	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5390 	break;
5391 
5392       default:
5393 	break;
5394       }
5395 }
5396 
5397 /* Expand the _LOCs in debug insns.  We run this after expanding all
5398    regular insns, so that any variables referenced in the function
5399    will have their DECL_RTLs set.  */
5400 
5401 static void
5402 expand_debug_locations (void)
5403 {
5404   rtx_insn *insn;
5405   rtx_insn *last = get_last_insn ();
5406   int save_strict_alias = flag_strict_aliasing;
5407 
5408   /* New alias sets created while setting up memory attributes cause
5409      -fcompare-debug failures, even though they don't bring about any
5410      codegen changes.  */
5411   flag_strict_aliasing = 0;
5412 
5413   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5414     if (DEBUG_BIND_INSN_P (insn))
5415       {
5416 	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5417 	rtx val;
5418 	rtx_insn *prev_insn, *insn2;
5419 	machine_mode mode;
5420 
5421 	if (value == NULL_TREE)
5422 	  val = NULL_RTX;
5423 	else
5424 	  {
5425 	    if (INSN_VAR_LOCATION_STATUS (insn)
5426 		== VAR_INIT_STATUS_UNINITIALIZED)
5427 	      val = expand_debug_source_expr (value);
5428 	    /* The avoid_deep_ter_for_debug function inserts
5429 	       debug bind stmts after SSA_NAME definition, with the
5430 	       SSA_NAME as the whole bind location.  Temporarily disable
5431 	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5432 	       being defined in this DEBUG_INSN.  */
5433 	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5434 	      {
5435 		tree *slot = deep_ter_debug_map->get (value);
5436 		if (slot)
5437 		  {
5438 		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
5439 		      *slot = NULL_TREE;
5440 		    else
5441 		      slot = NULL;
5442 		  }
5443 		val = expand_debug_expr (value);
5444 		if (slot)
5445 		  *slot = INSN_VAR_LOCATION_DECL (insn);
5446 	      }
5447 	    else
5448 	      val = expand_debug_expr (value);
5449 	    gcc_assert (last == get_last_insn ());
5450 	  }
5451 
5452 	if (!val)
5453 	  val = gen_rtx_UNKNOWN_VAR_LOC ();
5454 	else
5455 	  {
5456 	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
5457 
5458 	    gcc_assert (mode == GET_MODE (val)
5459 			|| (GET_MODE (val) == VOIDmode
5460 			    && (CONST_SCALAR_INT_P (val)
5461 				|| GET_CODE (val) == CONST_FIXED
5462 				|| GET_CODE (val) == LABEL_REF)));
5463 	  }
5464 
5465 	INSN_VAR_LOCATION_LOC (insn) = val;
5466 	prev_insn = PREV_INSN (insn);
5467 	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5468 	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5469       }
5470 
5471   flag_strict_aliasing = save_strict_alias;
5472 }
5473 
5474 /* Swap the operands of commutative operations so that the more
5475    expensive operand is expanded first.  */
5476 
5477 static void
5478 reorder_operands (basic_block bb)
5479 {
5480   unsigned int *lattice;  /* Hold cost of each statement.  */
5481   unsigned int i = 0, n = 0;
5482   gimple_stmt_iterator gsi;
5483   gimple_seq stmts;
5484   gimple *stmt;
5485   bool swap;
5486   tree op0, op1;
5487   ssa_op_iter iter;
5488   use_operand_p use_p;
5489   gimple *def0, *def1;
5490 
5491   /* Compute cost of each statement using estimate_num_insns.  */
5492   stmts = bb_seq (bb);
5493   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5494     {
5495       stmt = gsi_stmt (gsi);
5496       if (!is_gimple_debug (stmt))
5497         gimple_set_uid (stmt, n++);
5498     }
5499   lattice = XNEWVEC (unsigned int, n);
5500   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5501     {
5502       unsigned cost;
5503       stmt = gsi_stmt (gsi);
5504       if (is_gimple_debug (stmt))
5505 	continue;
5506       cost = estimate_num_insns (stmt, &eni_size_weights);
5507       lattice[i] = cost;
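      /* Add the cost of the statements feeding each SSA use, so that
	 lattice[i] approximates the cost of the whole expression tree
	 rooted at this statement.  */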
5508       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5509 	{
5510 	  tree use = USE_FROM_PTR (use_p);
5511 	  gimple *def_stmt;
5512 	  if (TREE_CODE (use) != SSA_NAME)
5513 	    continue;
5514 	  def_stmt = get_gimple_for_ssa_name (use);
5515 	  if (!def_stmt)
5516 	    continue;
5517 	  lattice[i] += lattice[gimple_uid (def_stmt)];
5518 	}
5519       i++;
5520       if (!is_gimple_assign (stmt)
5521 	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5522 	continue;
5523       op0 = gimple_op (stmt, 1);
5524       op1 = gimple_op (stmt, 2);
5525       if (TREE_CODE (op0) != SSA_NAME
5526 	  || TREE_CODE (op1) != SSA_NAME)
5527 	continue;
5528       /* Swap operands if the second one is more expensive.  */
5529       def0 = get_gimple_for_ssa_name (op0);
5530       def1 = get_gimple_for_ssa_name (op1);
5531       if (!def1)
5532 	continue;
5533       swap = false;
5534       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5535 	swap = true;
5536       if (swap)
5537 	{
5538 	  if (dump_file && (dump_flags & TDF_DETAILS))
5539 	    {
5540 	      fprintf (dump_file, "Swap operands in stmt:\n");
5541 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5542 	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5543 		       def0 ? lattice[gimple_uid (def0)] : 0,
5544 		       lattice[gimple_uid (def1)]);
5545 	    }
5546 	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5547 			     gimple_assign_rhs2_ptr (stmt));
5548 	}
5549     }
5550   XDELETE (lattice);
5551 }
5552 
5553 /* Expand basic block BB from GIMPLE trees to RTL.  */
5554 
5555 static basic_block
5556 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5557 {
5558   gimple_stmt_iterator gsi;
5559   gimple_seq stmts;
5560   gimple *stmt = NULL;
5561   rtx_note *note = NULL;
5562   rtx_insn *last;
5563   edge e;
5564   edge_iterator ei;
5565 
5566   if (dump_file)
5567     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5568 	     bb->index);
5569 
5570   /* Note that since we are now transitioning from GIMPLE to RTL, we
5571      cannot use the gsi_*_bb() routines because they expect the basic
5572      block to be in GIMPLE, instead of RTL.  Therefore, we need to
5573      access the BB sequence directly.  */
5574   if (optimize)
5575     reorder_operands (bb);
5576   stmts = bb_seq (bb);
5577   bb->il.gimple.seq = NULL;
5578   bb->il.gimple.phi_nodes = NULL;
5579   rtl_profile_for_bb (bb);
5580   init_rtl_bb_info (bb);
5581   bb->flags |= BB_RTL;
5582 
5583   /* Remove the RETURN_EXPR if we may fall through to the exit
5584      instead.  */
5585   gsi = gsi_last (stmts);
5586   if (!gsi_end_p (gsi)
5587       && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5588     {
5589       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5590 
5591       gcc_assert (single_succ_p (bb));
5592       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5593 
5594       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5595 	  && !gimple_return_retval (ret_stmt))
5596 	{
5597 	  gsi_remove (&gsi, false);
5598 	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5599 	}
5600     }
5601 
5602   gsi = gsi_start (stmts);
5603   if (!gsi_end_p (gsi))
5604     {
5605       stmt = gsi_stmt (gsi);
5606       if (gimple_code (stmt) != GIMPLE_LABEL)
5607 	stmt = NULL;
5608     }
5609 
5610   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5611 
5612   if (stmt || elt)
5613     {
5614       gcc_checking_assert (!note);
5615       last = get_last_insn ();
5616 
5617       if (stmt)
5618 	{
5619 	  expand_gimple_stmt (stmt);
5620 	  gsi_next (&gsi);
5621 	}
5622 
5623       if (elt)
5624 	emit_label (*elt);
5625 
5626       BB_HEAD (bb) = NEXT_INSN (last);
5627       if (NOTE_P (BB_HEAD (bb)))
5628 	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5629       gcc_assert (LABEL_P (BB_HEAD (bb)));
5630       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5631 
5632       maybe_dump_rtl_for_gimple_stmt (stmt, last);
5633     }
5634   else
5635     BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5636 
5637   if (note)
5638     NOTE_BASIC_BLOCK (note) = bb;
5639 
5640   for (; !gsi_end_p (gsi); gsi_next (&gsi))
5641     {
5642       basic_block new_bb;
5643 
5644       stmt = gsi_stmt (gsi);
5645 
5646       /* If this statement is a non-debug one, and we generate debug
5647 	 insns, then this one might be the last real use of a TERed
5648 	 SSA_NAME, but where there are still some debug uses further
5649 	 down.  Expanding the current SSA name in such further debug
5650 	 uses by their RHS might lead to wrong debug info, as coalescing
5651 	 might make the operands of such RHS be placed into the same
5652 	 pseudo as something else.  Like so:
5653 	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
5654 	   use(a_1);
5655 	   a_2 = ...
5656            #DEBUG ... => a_1
5657 	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5658 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5659 	 the write to a_2 would actually have clobbered the place which
5660 	 formerly held a_0.
5661 
5662 	 So, instead of that, we recognize the situation, and generate
5663 	 debug temporaries at the last real use of TERed SSA names:
5664 	   a_1 = a_0 + 1;
5665            #DEBUG #D1 => a_1
5666 	   use(a_1);
5667 	   a_2 = ...
5668            #DEBUG ... => #D1
5669 	 */
5670       if (MAY_HAVE_DEBUG_BIND_INSNS
5671 	  && SA.values
5672 	  && !is_gimple_debug (stmt))
5673 	{
5674 	  ssa_op_iter iter;
5675 	  tree op;
5676 	  gimple *def;
5677 
5678 	  location_t sloc = curr_insn_location ();
5679 
5680 	  /* Look for SSA names that have their last use here (TERed
5681 	     names always have only one real use).  */
5682 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5683 	    if ((def = get_gimple_for_ssa_name (op)))
5684 	      {
5685 		imm_use_iterator imm_iter;
5686 		use_operand_p use_p;
5687 		bool have_debug_uses = false;
5688 
5689 		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5690 		  {
5691 		    if (gimple_debug_bind_p (USE_STMT (use_p)))
5692 		      {
5693 			have_debug_uses = true;
5694 			break;
5695 		      }
5696 		  }
5697 
5698 		if (have_debug_uses)
5699 		  {
5700 		    /* OP is a TERed SSA name, with DEF its defining
5701 		       statement, and where OP is used in further debug
5702 		       instructions.  Generate a debug temporary, and
5703 		       replace all uses of OP in debug insns with that
5704 		       temporary.  */
5705 		    gimple *debugstmt;
5706 		    tree value = gimple_assign_rhs_to_tree (def);
5707 		    tree vexpr = make_node (DEBUG_EXPR_DECL);
5708 		    rtx val;
5709 		    machine_mode mode;
5710 
5711 		    set_curr_insn_location (gimple_location (def));
5712 
5713 		    DECL_ARTIFICIAL (vexpr) = 1;
5714 		    TREE_TYPE (vexpr) = TREE_TYPE (value);
5715 		    if (DECL_P (value))
5716 		      mode = DECL_MODE (value);
5717 		    else
5718 		      mode = TYPE_MODE (TREE_TYPE (value));
5719 		    SET_DECL_MODE (vexpr, mode);
5720 
5721 		    val = gen_rtx_VAR_LOCATION
5722 			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5723 
5724 		    emit_debug_insn (val);
5725 
5726 		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5727 		      {
5728 			if (!gimple_debug_bind_p (debugstmt))
5729 			  continue;
5730 
5731 			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5732 			  SET_USE (use_p, vexpr);
5733 
5734 			update_stmt (debugstmt);
5735 		      }
5736 		  }
5737 	      }
5738 	  set_curr_insn_location (sloc);
5739 	}
5740 
5741       currently_expanding_gimple_stmt = stmt;
5742 
5743       /* Expand this statement, then evaluate the resulting RTL and
5744 	 fixup the CFG accordingly.  */
5745       if (gimple_code (stmt) == GIMPLE_COND)
5746 	{
5747 	  new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5748 	  if (new_bb)
5749 	    return new_bb;
5750 	}
5751       else if (is_gimple_debug (stmt))
5752 	{
5753 	  location_t sloc = curr_insn_location ();
5754 	  gimple_stmt_iterator nsi = gsi;
5755 
5756 	  for (;;)
5757 	    {
5758 	      tree var;
5759 	      tree value = NULL_TREE;
5760 	      rtx val = NULL_RTX;
5761 	      machine_mode mode;
5762 
5763 	      if (!gimple_debug_nonbind_marker_p (stmt))
5764 		{
5765 		  if (gimple_debug_bind_p (stmt))
5766 		    {
5767 		      var = gimple_debug_bind_get_var (stmt);
5768 
5769 		      if (TREE_CODE (var) != DEBUG_EXPR_DECL
5770 			  && TREE_CODE (var) != LABEL_DECL
5771 			  && !target_for_debug_bind (var))
5772 			goto delink_debug_stmt;
5773 
5774 		      if (DECL_P (var) && !VECTOR_TYPE_P (TREE_TYPE (var)))
5775 			mode = DECL_MODE (var);
5776 		      else
5777 			mode = TYPE_MODE (TREE_TYPE (var));
5778 
5779 		      if (gimple_debug_bind_has_value_p (stmt))
5780 			value = gimple_debug_bind_get_value (stmt);
5781 
5782 		      val = gen_rtx_VAR_LOCATION
5783 			(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5784 		    }
5785 		  else if (gimple_debug_source_bind_p (stmt))
5786 		    {
5787 		      var = gimple_debug_source_bind_get_var (stmt);
5788 
5789 		      value = gimple_debug_source_bind_get_value (stmt);
5790 
5791 		      if (!VECTOR_TYPE_P (TREE_TYPE (var)))
5792 			mode = DECL_MODE (var);
5793 		      else
5794 			mode = TYPE_MODE (TREE_TYPE (var));
5795 
5796 		      val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5797 						  VAR_INIT_STATUS_UNINITIALIZED);
5798 		    }
5799 		  else
5800 		    gcc_unreachable ();
5801 		}
5802 	      /* If this function was first compiled with markers
5803 		 enabled, but they're now disabled (e.g. LTO), drop
5804 		 them on the floor.  */
5805 	      else if (gimple_debug_nonbind_marker_p (stmt)
5806 		       && !MAY_HAVE_DEBUG_MARKER_INSNS)
5807 		goto delink_debug_stmt;
5808 	      else if (gimple_debug_begin_stmt_p (stmt))
5809 		val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5810 	      else if (gimple_debug_inline_entry_p (stmt))
5811 		{
5812 		  tree block = gimple_block (stmt);
5813 
5814 		  if (block)
5815 		    val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5816 		  else
5817 		    goto delink_debug_stmt;
5818 		}
5819 	      else
5820 		gcc_unreachable ();
5821 
5822 	      last = get_last_insn ();
5823 
5824 	      set_curr_insn_location (gimple_location (stmt));
5825 
5826 	      emit_debug_insn (val);
5827 
5828 	      if (dump_file && (dump_flags & TDF_DETAILS))
5829 		{
5830 		  /* We can't dump the insn with a TREE where an RTX
5831 		     is expected.  */
5832 		  if (GET_CODE (val) == VAR_LOCATION)
5833 		    {
5834 		      gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5835 		      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5836 		    }
5837 		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
5838 		  if (GET_CODE (val) == VAR_LOCATION)
5839 		    PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5840 		}
5841 
5842 	    delink_debug_stmt:
5843 	      /* In order not to generate too many debug temporaries,
5844 	         we delink all uses of debug statements we already expanded.
5845 		 Therefore debug statements between definition and real
5846 		 use of TERed SSA names will continue to use the SSA name,
5847 		 and not be replaced with debug temps.  */
5848 	      delink_stmt_imm_use (stmt);
5849 
5850 	      gsi = nsi;
5851 	      gsi_next (&nsi);
5852 	      if (gsi_end_p (nsi))
5853 		break;
5854 	      stmt = gsi_stmt (nsi);
5855 	      if (!is_gimple_debug (stmt))
5856 		break;
5857 	    }
5858 
5859 	  set_curr_insn_location (sloc);
5860 	}
5861       else
5862 	{
5863 	  gcall *call_stmt = dyn_cast <gcall *> (stmt);
5864 	  if (call_stmt
5865 	      && gimple_call_tail_p (call_stmt)
5866 	      && disable_tail_calls)
5867 	    gimple_call_set_tail (call_stmt, false);
5868 
5869 	  if (call_stmt && gimple_call_tail_p (call_stmt))
5870 	    {
5871 	      bool can_fallthru;
5872 	      new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5873 	      if (new_bb)
5874 		{
5875 		  if (can_fallthru)
5876 		    bb = new_bb;
5877 		  else
5878 		    return new_bb;
5879 		}
5880 	    }
5881 	  else
5882 	    {
5883 	      def_operand_p def_p;
5884 	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5885 
5886 	      if (def_p != NULL)
5887 		{
5888 		  /* Ignore this stmt if it is in the list of
5889 		     replaceable expressions.  */
5890 		  if (SA.values
5891 		      && bitmap_bit_p (SA.values,
5892 				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5893 		    continue;
5894 		}
5895 	      last = expand_gimple_stmt (stmt);
5896 	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
5897 	    }
5898 	}
5899     }
5900 
5901   currently_expanding_gimple_stmt = NULL;
5902 
5903   /* Expand implicit goto and convert goto_locus.  */
5904   FOR_EACH_EDGE (e, ei, bb->succs)
5905     {
5906       if (e->goto_locus != UNKNOWN_LOCATION)
5907 	set_curr_insn_location (e->goto_locus);
5908       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5909 	{
5910 	  emit_jump (label_rtx_for_bb (e->dest));
5911 	  e->flags &= ~EDGE_FALLTHRU;
5912 	}
5913     }
5914 
5915   /* Expanded RTL can create a jump in the last instruction of the block.
5916      Such a jump might later be assumed to be a jump to the successor and
5917      break edge insertion.  Insert a dummy move to prevent this.  PR41440.  */
5918   if (single_succ_p (bb)
5919       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5920       && (last = get_last_insn ())
5921       && (JUMP_P (last)
5922 	  || (DEBUG_INSN_P (last)
5923 	      && JUMP_P (prev_nondebug_insn (last)))))
5924     {
5925       rtx dummy = gen_reg_rtx (SImode);
5926       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5927     }
5928 
5929   do_pending_stack_adjust ();
5930 
5931   /* Find the block tail.  The last insn in the block is the insn
5932      before a barrier and/or table jump insn.  */
5933   last = get_last_insn ();
5934   if (BARRIER_P (last))
5935     last = PREV_INSN (last);
5936   if (JUMP_TABLE_DATA_P (last))
5937     last = PREV_INSN (PREV_INSN (last));
5938   if (BARRIER_P (last))
5939     last = PREV_INSN (last);
5940   BB_END (bb) = last;
5941 
5942   update_bb_for_insn (bb);
5943 
5944   return bb;
5945 }
5946 
5947 
5948 /* Create a basic block for initialization code.  */
5949 
5950 static basic_block
5951 construct_init_block (void)
5952 {
5953   basic_block init_block, first_block;
5954   edge e = NULL;
5955   int flags;
5956 
5957   /* Multiple entry points not supported yet.  */
5958   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5959   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5960   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5961   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5962   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5963 
5964   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5965 
5966   /* When the entry edge points to the first basic block, we don't need
5967      a jump; otherwise we have to jump to the proper target.  */
5968   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5969     {
5970       tree label = gimple_block_label (e->dest);
5971 
5972       emit_jump (jump_target_rtx (label));
5973       flags = 0;
5974     }
5975   else
5976     flags = EDGE_FALLTHRU;
5977 
5978   init_block = create_basic_block (NEXT_INSN (get_insns ()),
5979 				   get_last_insn (),
5980 				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
5981   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5982   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5983   if (e)
5984     {
5985       first_block = e->dest;
5986       redirect_edge_succ (e, init_block);
5987       make_single_succ_edge (init_block, first_block, flags);
5988     }
5989   else
5990     make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5991 			   EDGE_FALLTHRU);
5992 
5993   update_bb_for_insn (init_block);
5994   return init_block;
5995 }
5996 
5997 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5998    found in the block tree.  */
5999 
6000 static void
6001 set_block_levels (tree block, int level)
6002 {
6003   while (block)
6004     {
6005       BLOCK_NUMBER (block) = level;
6006       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6007       block = BLOCK_CHAIN (block);
6008     }
6009 }
6010 
6011 /* Create a block containing landing pads and similar stuff.  */
6012 
6013 static void
6014 construct_exit_block (void)
6015 {
6016   rtx_insn *head = get_last_insn ();
6017   rtx_insn *end;
6018   basic_block exit_block;
6019   edge e, e2;
6020   unsigned ix;
6021   edge_iterator ei;
6022   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6023   rtx_insn *orig_end = BB_END (prev_bb);
6024 
6025   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6026 
6027   /* Make sure the locus is set to the end of the function, so that
6028      epilogue line numbers and warnings are set properly.  */
6029   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6030     input_location = cfun->function_end_locus;
6031 
6032   /* Generate rtl for function exit.  */
6033   expand_function_end ();
6034 
6035   end = get_last_insn ();
6036   if (head == end)
6037     return;
6038   /* While emitting the function end we could have moved the end of the
6039      last basic block.  */
6040   BB_END (prev_bb) = orig_end;
6041   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6042     head = NEXT_INSN (head);
6043   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6044      bb count accounting will be confused.  Any instructions before that
6045      label are emitted for the case where PREV_BB falls through into the
6046      exit block, so append those instructions to prev_bb in that case.  */
6047   if (NEXT_INSN (head) != return_label)
6048     {
6049       while (NEXT_INSN (head) != return_label)
6050 	{
6051 	  if (!NOTE_P (NEXT_INSN (head)))
6052 	    BB_END (prev_bb) = NEXT_INSN (head);
6053 	  head = NEXT_INSN (head);
6054 	}
6055     }
6056   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6057   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6058   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6059 
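  /* Redirect all non-abnormal predecessors of the exit block to the newly
     created exit_block.  */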
6060   ix = 0;
6061   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6062     {
6063       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6064       if (!(e->flags & EDGE_ABNORMAL))
6065 	redirect_edge_succ (e, exit_block);
6066       else
6067 	ix++;
6068     }
6069 
6070   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6071 			     EDGE_FALLTHRU);
6072   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6073     if (e2 != e)
6074       {
6075 	exit_block->count -= e2->count ();
6076       }
6077   update_bb_for_insn (exit_block);
6078 }
6079 
6080 /* Helper function for discover_nonconstant_array_refs.
6081    Look for ARRAY_REF nodes with non-constant indexes and mark them
6082    addressable.  */
6083 
6084 static tree
6085 discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
6086 				   void *data ATTRIBUTE_UNUSED)
6087 {
6088   tree t = *tp;
6089 
6090   if (IS_TYPE_OR_DECL_P (t))
6091     *walk_subtrees = 0;
6092   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6093     {
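      /* Strip reference components whose offsets are compile-time
	 invariant; if an ARRAY_REF or ARRAY_RANGE_REF still remains, its
	 index is variable, so force the underlying decl into memory by
	 marking it addressable.  */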
6094       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6095 	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6096 	      && (!TREE_OPERAND (t, 2)
6097 		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6098 	     || (TREE_CODE (t) == COMPONENT_REF
6099 		 && (!TREE_OPERAND (t,2)
6100 		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6101 	     || TREE_CODE (t) == BIT_FIELD_REF
6102 	     || TREE_CODE (t) == REALPART_EXPR
6103 	     || TREE_CODE (t) == IMAGPART_EXPR
6104 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
6105 	     || CONVERT_EXPR_P (t))
6106 	t = TREE_OPERAND (t, 0);
6107 
6108       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6109 	{
6110 	  t = get_base_address (t);
6111 	  if (t && DECL_P (t)
6112               && DECL_MODE (t) != BLKmode)
6113 	    TREE_ADDRESSABLE (t) = 1;
6114 	}
6115 
6116       *walk_subtrees = 0;
6117     }
6118   /* References of size POLY_INT_CST to a fixed-size object must go
6119      through memory.  It's more efficient to force that here than
6120      to create temporary slots on the fly.  */
6121   else if ((TREE_CODE (t) == MEM_REF || TREE_CODE (t) == TARGET_MEM_REF)
6122 	   && TYPE_SIZE (TREE_TYPE (t))
6123 	   && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t))))
6124     {
6125       tree base = get_base_address (t);
6126       if (base
6127 	  && DECL_P (base)
6128 	  && DECL_MODE (base) != BLKmode
6129 	  && GET_MODE_SIZE (DECL_MODE (base)).is_constant ())
6130 	TREE_ADDRESSABLE (base) = 1;
6131       *walk_subtrees = 0;
6132     }
6133 
6134   return NULL_TREE;
6135 }
6136 
6137 /* RTL expansion is not able to compile array references with variable
6138    offsets for arrays stored in a single register.  Discover such
6139    expressions and mark the variables as addressable to avoid this
6140    scenario.  */
6141 
6142 static void
6143 discover_nonconstant_array_refs (void)
6144 {
6145   basic_block bb;
6146   gimple_stmt_iterator gsi;
6147 
6148   FOR_EACH_BB_FN (bb, cfun)
6149     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6150       {
6151 	gimple *stmt = gsi_stmt (gsi);
6152 	if (!is_gimple_debug (stmt))
6153 	  {
6154 	    walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6155 	    gcall *call = dyn_cast <gcall *> (stmt);
6156 	    if (call && gimple_call_internal_p (call))
6157 	      switch (gimple_call_internal_fn (call))
6158 		{
6159 		case IFN_LOAD_LANES:
6160 		  /* The source must be a MEM.  */
6161 		  mark_addressable (gimple_call_arg (call, 0));
6162 		  break;
6163 		case IFN_STORE_LANES:
6164 		  /* The destination must be a MEM.  */
6165 		  mark_addressable (gimple_call_lhs (call));
6166 		  break;
6167 		default:
6168 		  break;
6169 		}
6170 	  }
6171       }
6172 }
6173 
6174 /* This function sets crtl->args.internal_arg_pointer to a virtual
6175    register if DRAP is needed.  The local register allocator will replace
6176    virtual_incoming_args_rtx with the virtual register.  */
6177 
6178 static void
6179 expand_stack_alignment (void)
6180 {
6181   rtx drap_rtx;
6182   unsigned int preferred_stack_boundary;
6183 
6184   if (! SUPPORTS_STACK_ALIGNMENT)
6185     return;
6186 
6187   if (cfun->calls_alloca
6188       || cfun->has_nonlocal_label
6189       || crtl->has_nonlocal_goto)
6190     crtl->need_drap = true;
6191 
6192   /* Call update_stack_boundary here again to update incoming stack
6193      boundary.  It may set incoming stack alignment to a different
6194      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
6195      use the minimum incoming stack alignment to check if it is OK
6196      to perform sibcall optimization since sibcall optimization will
6197      only align the outgoing stack to incoming stack boundary.  */
6198   if (targetm.calls.update_stack_boundary)
6199     targetm.calls.update_stack_boundary ();
6200 
6201   /* The incoming stack frame has to be aligned at least at
6202      parm_stack_boundary.  */
6203   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6204 
6205   /* Update crtl->stack_alignment_estimated and use it later to align
6206      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
6207      exceptions since callgraph doesn't collect incoming stack alignment
6208      in this case.  */
6209   if (cfun->can_throw_non_call_exceptions
6210       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6211     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6212   else
6213     preferred_stack_boundary = crtl->preferred_stack_boundary;
6214   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6215     crtl->stack_alignment_estimated = preferred_stack_boundary;
6216   if (preferred_stack_boundary > crtl->stack_alignment_needed)
6217     crtl->stack_alignment_needed = preferred_stack_boundary;
6218 
6219   gcc_assert (crtl->stack_alignment_needed
6220 	      <= crtl->stack_alignment_estimated);
6221 
6222   crtl->stack_realign_needed
6223     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6224   crtl->stack_realign_tried = crtl->stack_realign_needed;
6225 
6226   crtl->stack_realign_processed = true;
6227 
6228   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6229      alignment.  */
6230   gcc_assert (targetm.calls.get_drap_rtx != NULL);
6231   drap_rtx = targetm.calls.get_drap_rtx ();
6232 
6233   /* stack_realign_drap and drap_rtx must match.  */
6234   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6235 
6236   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
6237   if (drap_rtx != NULL)
6238     {
6239       crtl->args.internal_arg_pointer = drap_rtx;
6240 
6241       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6242          needed. */
6243       fixup_tail_calls ();
6244     }
6245 }
6246 
6247 
6248 static void
6249 expand_main_function (void)
6250 {
6251 #if (defined(INVOKE__main)				\
6252      || (!defined(HAS_INIT_SECTION)			\
6253 	 && !defined(INIT_SECTION_ASM_OP)		\
6254 	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6255   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6256 #endif
6257 }
6258 
6259 
6260 /* Expand code to initialize the stack_protect_guard.  This is invoked at
6261    the beginning of a function to be protected.  */
6262 
6263 static void
6264 stack_protect_prologue (void)
6265 {
6266   tree guard_decl = targetm.stack_protect_guard ();
6267   rtx x, y;
6268 
6269   crtl->stack_protect_guard_decl = guard_decl;
6270   x = expand_normal (crtl->stack_protect_guard);
6271 
6272   if (targetm.have_stack_protect_combined_set () && guard_decl)
6273     {
6274       gcc_assert (DECL_P (guard_decl));
6275       y = DECL_RTL (guard_decl);
6276 
6277       /* Allow the target to compute address of Y and copy it to X without
6278 	 leaking Y into a register.  This combined address + copy pattern
6279 	 allows the target to prevent spilling of any intermediate results by
6280 	 splitting it after register allocator.  */
6281       if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6282 	{
6283 	  emit_insn (insn);
6284 	  return;
6285 	}
6286     }
6287 
6288   if (guard_decl)
6289     y = expand_normal (guard_decl);
6290   else
6291     y = const0_rtx;
6292 
6293   /* Allow the target to copy from Y to X without leaking Y into a
6294      register.  */
6295   if (targetm.have_stack_protect_set ())
6296     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6297       {
6298 	emit_insn (insn);
6299 	return;
6300       }
6301 
6302   /* Otherwise do a straight move.  */
6303   emit_move_insn (x, y);
6304 }
6305 
6306 /* Translate the intermediate representation contained in the CFG
6307    from GIMPLE trees to RTL.
6308 
6309    We do conversion per basic block and preserve/update the tree CFG.
6310    This implies we have to do some magic as the CFG can simultaneously
6311    consist of basic blocks containing RTL and GIMPLE trees.  This can
6312    confuse the CFG hooks, so be careful to not manipulate CFG during
6313    the expansion.  */
6314 
6315 namespace {
6316 
6317 const pass_data pass_data_expand =
6318 {
6319   RTL_PASS, /* type */
6320   "expand", /* name */
6321   OPTGROUP_NONE, /* optinfo_flags */
6322   TV_EXPAND, /* tv_id */
6323   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6324     | PROP_gimple_lcx
6325     | PROP_gimple_lvec
6326     | PROP_gimple_lva), /* properties_required */
6327   PROP_rtl, /* properties_provided */
6328   ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6329   0, /* todo_flags_start */
6330   0, /* todo_flags_finish */
6331 };
6332 
6333 class pass_expand : public rtl_opt_pass
6334 {
6335 public:
6336   pass_expand (gcc::context *ctxt)
6337     : rtl_opt_pass (pass_data_expand, ctxt)
6338   {}
6339 
6340   /* opt_pass methods: */
6341   virtual unsigned int execute (function *);
6342 
6343 }; // class pass_expand
6344 
6345 unsigned int
6346 pass_expand::execute (function *fun)
6347 {
6348   basic_block bb, init_block;
6349   edge_iterator ei;
6350   edge e;
6351   rtx_insn *var_seq, *var_ret_seq;
6352   unsigned i;
6353 
6354   timevar_push (TV_OUT_OF_SSA);
6355   rewrite_out_of_ssa (&SA);
6356   timevar_pop (TV_OUT_OF_SSA);
6357   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6358 
6359   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6360     {
6361       gimple_stmt_iterator gsi;
6362       FOR_EACH_BB_FN (bb, cfun)
6363 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6364 	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
6365 	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6366     }
6367 
6368   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
6369   discover_nonconstant_array_refs ();
6370 
6371   /* Make sure all values used by the optimization passes have sane
6372      defaults.  */
6373   reg_renumber = 0;
6374 
6375   /* Some backends want to know that we are expanding to RTL.  */
6376   currently_expanding_to_rtl = 1;
6377   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
6378   free_dominance_info (CDI_DOMINATORS);
6379 
6380   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6381 
6382   insn_locations_init ();
6383   if (!DECL_IS_BUILTIN (current_function_decl))
6384     {
6385       /* Eventually, all FEs should explicitly set function_start_locus.  */
6386       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6387 	set_curr_insn_location
6388 	  (DECL_SOURCE_LOCATION (current_function_decl));
6389       else
6390 	set_curr_insn_location (fun->function_start_locus);
6391     }
6392   else
6393     set_curr_insn_location (UNKNOWN_LOCATION);
6394   prologue_location = curr_insn_location ();
6395 
6396 #ifdef INSN_SCHEDULING
6397   init_sched_attrs ();
6398 #endif
6399 
6400   /* Make sure first insn is a note even if we don't want linenums.
6401      This makes sure the first insn will never be deleted.
6402      Also, final expects a note to appear there.  */
6403   emit_note (NOTE_INSN_DELETED);
6404 
6405   targetm.expand_to_rtl_hook ();
6406   crtl->init_stack_alignment ();
6407   fun->cfg->max_jumptable_ents = 0;
6408 
6409   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6410      of the function section at expansion time to predict the distance of calls.  */
6411   resolve_unique_section (current_function_decl, 0, flag_function_sections);
6412 
6413   /* Expand the variables recorded during gimple lowering.  */
6414   timevar_push (TV_VAR_EXPAND);
6415   start_sequence ();
6416 
6417   var_ret_seq = expand_used_vars ();
6418 
6419   var_seq = get_insns ();
6420   end_sequence ();
6421   timevar_pop (TV_VAR_EXPAND);
6422 
6423   /* Honor stack protection warnings.  */
6424   if (warn_stack_protect)
6425     {
6426       if (fun->calls_alloca)
6427 	warning (OPT_Wstack_protector,
6428 		 "stack protector not protecting local variables: "
6429 		 "variable length buffer");
6430       if (has_short_buffer && !crtl->stack_protect_guard)
6431 	warning (OPT_Wstack_protector,
6432 		 "stack protector not protecting function: "
6433 		 "all local arrays are less than %d bytes long",
6434 		 (int) param_ssp_buffer_size);
6435     }
6436 
6437   /* Set up parameters and prepare for return, for the function.  */
6438   expand_function_start (current_function_decl);
6439 
6440   /* If we emitted any instructions for setting up the variables,
6441      emit them before the FUNCTION_START note.  */
6442   if (var_seq)
6443     {
6444       emit_insn_before (var_seq, parm_birth_insn);
6445 
6446       /* In expand_function_end we'll insert the alloca save/restore
6447 	 before parm_birth_insn.  We've just inserted an alloca call.
6448 	 Adjust the pointer to match.  */
6449       parm_birth_insn = var_seq;
6450     }
6451 
6452   /* Now propagate the RTL assignment of each partition to the
6453      underlying var of each SSA_NAME.  */
6454   tree name;
6455 
6456   FOR_EACH_SSA_NAME (i, name, cfun)
6457     {
6458       /* We might have generated new SSA names in
6459 	 update_alias_info_with_stack_vars.  They will have NULL
6460 	 defining statements, and won't be part of the partitioning,
6461 	 so ignore those.  */
6462       if (!SSA_NAME_DEF_STMT (name))
6463 	continue;
6464 
6465       adjust_one_expanded_partition_var (name);
6466     }
6467 
6468   /* Clean up RTL of variables that straddle across multiple
6469      partitions, and check that the rtl of any PARM_DECLs that are not
6470      cleaned up is that of their default defs.  */
6471   FOR_EACH_SSA_NAME (i, name, cfun)
6472     {
6473       int part;
6474 
6475       /* We might have generated new SSA names in
6476 	 update_alias_info_with_stack_vars.  They will have NULL
6477 	 defining statements, and won't be part of the partitioning,
6478 	 so ignore those.  */
6479       if (!SSA_NAME_DEF_STMT (name))
6480 	continue;
6481       part = var_to_partition (SA.map, name);
6482       if (part == NO_PARTITION)
6483 	continue;
6484 
6485       /* If this decl was marked as living in multiple places, reset
6486 	 this now to NULL.  */
6487       tree var = SSA_NAME_VAR (name);
6488       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6489 	SET_DECL_RTL (var, NULL);
6490       /* Check that the pseudos chosen by assign_parms are those of
6491 	 the corresponding default defs.  */
6492       else if (SSA_NAME_IS_DEFAULT_DEF (name)
6493 	       && (TREE_CODE (var) == PARM_DECL
6494 		   || TREE_CODE (var) == RESULT_DECL))
6495 	{
6496 	  rtx in = DECL_RTL_IF_SET (var);
6497 	  gcc_assert (in);
6498 	  rtx out = SA.partition_to_pseudo[part];
6499 	  gcc_assert (in == out);
6500 
6501 	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6502 	     those expected by debug backends for each parm and for
6503 	     the result.  This is particularly important for stabs,
6504 	     whose register elimination from parm's DECL_RTL may cause
6505 	     -fcompare-debug differences as SET_DECL_RTL changes reg's
6506 	     attrs.  So, make sure the RTL already has the parm as the
6507 	     EXPR, so that it won't change.  */
6508 	  SET_DECL_RTL (var, NULL_RTX);
6509 	  if (MEM_P (in))
6510 	    set_mem_attributes (in, var, true);
6511 	  SET_DECL_RTL (var, in);
6512 	}
6513     }
6514 
6515   /* If this function is `main', emit a call to `__main'
6516      to run global initializers, etc.  */
6517   if (DECL_NAME (current_function_decl)
6518       && MAIN_NAME_P (DECL_NAME (current_function_decl))
6519       && DECL_FILE_SCOPE_P (current_function_decl))
6520     expand_main_function ();
6521 
6522   /* Initialize the stack_protect_guard field.  This must happen after the
6523      call to __main (if any) so that the external decl is initialized.  */
6524   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6525     stack_protect_prologue ();
6526 
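  /* Eliminate the remaining PHI nodes by emitting, for each of them, the
     partition copies computed by the out-of-SSA machinery; the copies are
     queued on the incoming edges and committed along with the other
     pending edge insertions below.  */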
6527   expand_phi_nodes (&SA);
6528 
6529   /* Release any stale SSA redirection data.  */
6530   redirect_edge_var_map_empty ();
6531 
6532   /* Register rtl specific functions for cfg.  */
6533   rtl_register_cfg_hooks ();
6534 
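  /* Package the prologue-side RTL emitted so far (parameter setup, the
     stack protector prologue and the call to __main, if any) into an
     initial basic block hooked up to the entry block.  */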
6535   init_block = construct_init_block ();
6536 
6537   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It will be cleared from
6538      the remaining edges later.  */
6539   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6540     e->flags &= ~EDGE_EXECUTABLE;
6541 
6542   /* If the function has too many debug markers, drop them while expanding.  */
6543   if (cfun->debug_marker_count
6544       >= param_max_debug_marker_count)
6545     cfun->debug_nonbind_markers = false;
6546 
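  /* Cache the RTL code label created for each basic block, so that a jump
     expanded before its target block can already refer to the label.  */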
6547   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
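  /* Expand the remaining basic blocks in layout order.  Note that
     expand_gimple_basic_block may split a block (e.g. for a conditional
     jump or a tail call) and returns the last block it produced, so the
     walk resumes from there.  Passing a non-null VAR_RET_SEQ disables
     tail calls, since code still has to run before the function returns.  */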
6548   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6549 		  next_bb)
6550     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6551 
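  /* Debug bind insns were emitted with their locations left pending; now
     that everything has RTL, compute the actual location expressions.  */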
6552   if (MAY_HAVE_DEBUG_BIND_INSNS)
6553     expand_debug_locations ();
6554 
6555   if (deep_ter_debug_map)
6556     {
6557       delete deep_ter_debug_map;
6558       deep_ter_debug_map = NULL;
6559     }
6560 
6561   /* Free stuff we no longer need after GIMPLE optimizations.  */
6562   free_dominance_info (CDI_DOMINATORS);
6563   free_dominance_info (CDI_POST_DOMINATORS);
6564   delete_tree_cfg_annotations (fun);
6565 
6566   timevar_push (TV_OUT_OF_SSA);
6567   finish_out_of_ssa (&SA);
6568   timevar_pop (TV_OUT_OF_SSA);
6569 
6570   timevar_push (TV_POST_EXPAND);
6571   /* We are no longer in SSA form.  */
6572   fun->gimple_df->in_ssa_p = false;
6573   loops_state_clear (LOOP_CLOSED_SSA);
6574 
6575   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6576      conservatively to true until they are all profile aware.  */
6577   delete lab_rtx_for_bb;
6578   free_histograms (fun);
6579 
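  /* Emit the end-of-function RTL (the return sequence and related notes)
     and wrap it in a new basic block wired to the exit block.  */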
6580   construct_exit_block ();
6581   insn_locations_finalize ();
6582 
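  /* If expand_used_vars produced a sequence that must run just before the
     function returns (e.g. ASan stack un-poisoning), emit it after the
     return label, past the basic block note if one directly follows.  */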
6583   if (var_ret_seq)
6584     {
6585       rtx_insn *after = return_label;
6586       rtx_insn *next = NEXT_INSN (after);
6587       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6588 	after = next;
6589       emit_insn_after (var_ret_seq, after);
6590     }
6591 
6592   /* Zap the tree EH table.  */
6593   set_eh_throw_stmt_table (fun, NULL);
6594 
6595   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6596      to split edges, which edge insertions might do.  */
6597   rebuild_jump_labels (get_insns ());
6598 
6599   /* If we have a single successor to the entry block, put the pending insns
6600      after parm birth, but before NOTE_INSN_FUNCTION_BEG.  */
6601   if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6602     {
6603       edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
6604       if (e->insns.r)
6605 	{
6606 	  rtx_insn *insns = e->insns.r;
6607 	  e->insns.r = NULL;
6608 	  rebuild_jump_labels_chain (insns);
6609 	  if (NOTE_P (parm_birth_insn)
6610 	      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6611 	    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6612 	  else
6613 	    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6614 	}
6615     }
6616 
6617   /* Otherwise, and for any other edges, commit insertions the usual way.  */
6618   commit_edge_insertions ();
6619 
6620   /* We're done expanding trees to RTL.  */
6621   currently_expanding_to_rtl = 0;
6622 
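  /* Flush the queue of variables that were marked addressable during
     expansion but whose flag update had to be deferred until now.  */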
6623   flush_mark_addressable_queue ();
6624 
6625   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6626 		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6627     {
6628       edge e;
6629       edge_iterator ei;
6630       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6631 	{
6632 	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
6633 	  e->flags &= ~EDGE_EXECUTABLE;
6634 
6635 	  /* At the moment not all abnormal edges match the RTL
6636 	     representation.  It is safe to remove them here as
6637 	     find_many_sub_basic_blocks will rediscover them.
6638 	     In the future we should get this fixed properly.  */
6639 	  if ((e->flags & EDGE_ABNORMAL)
6640 	      && !(e->flags & EDGE_SIBCALL))
6641 	    remove_edge (e);
6642 	  else
6643 	    ei_next (&ei);
6644 	}
6645     }
6646 
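  /* Expanding a single GIMPLE statement can introduce control flow in the
     middle of a block (e.g. compare-and-branch sequences or calls that can
     throw).  Rescan every block, splitting it into sub-basic-blocks where
     needed, and then drop any edges that ended up dead.  */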
6647   auto_sbitmap blocks (last_basic_block_for_fn (fun));
6648   bitmap_ones (blocks);
6649   find_many_sub_basic_blocks (blocks);
6650   purge_all_dead_edges ();
6651 
6652   /* After initial rtl generation, call back to finish generating
6653      exception support code.  We need to do this before cleaning up
6654      the CFG as the code does not expect dead landing pads.  */
6655   if (fun->eh->region_tree != NULL)
6656     finish_eh_generation ();
6657 
6658   /* Call expand_stack_alignment after finishing all
6659      updates to crtl->preferred_stack_boundary.  */
6660   expand_stack_alignment ();
6661 
6662   /* Fix up REG_EQUIV notes in the prologue if there are tailcalls in this
6663      function.  */
6664   if (crtl->tail_call_emit)
6665     fixup_tail_calls ();
6666 
6667   /* BB subdivision may have created basic blocks that are only reachable
6668      from unlikely bbs but not marked as such in the profile.  */
6669   if (optimize)
6670     propagate_unlikely_bbs_forward ();
6671 
6672   /* Remove unreachable blocks, otherwise we cannot compute dominators
6673      which are needed for loop state verification.  As a side-effect
6674      this also compacts blocks.
6675      ???  We cannot remove trivially dead insns here as for example
6676      the DRAP reg on i?86 is not magically live at this point.
6677      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
6678   cleanup_cfg (CLEANUP_NO_INSN_DEL);
6679 
6680   checking_verify_flow_info ();
6681 
6682   /* Initialize pseudos allocated for hard registers.  */
6683   emit_initial_value_sets ();
6684 
6685   /* And finally unshare all RTL.  */
6686   unshare_all_rtl ();
6687 
6688   /* There's no need to defer outputting this function any more; we
6689      know we want to output it.  */
6690   DECL_DEFER_OUTPUT (current_function_decl) = 0;
6691 
6692   /* Now that we're done expanding trees to RTL, we shouldn't have any
6693      more CONCATs anywhere.  */
6694   generating_concat_p = 0;
6695 
6696   if (dump_file)
6697     {
6698       fprintf (dump_file,
6699 	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6700       /* And the pass manager will dump RTL for us.  */
6701     }
6702 
6703   /* If we're emitting a nested function, make sure its parent gets
6704      emitted as well.  Doing otherwise confuses debug info.  */
6705     {
6706       tree parent;
6707       for (parent = DECL_CONTEXT (current_function_decl);
6708 	   parent != NULL_TREE;
6709 	   parent = get_containing_scope (parent))
6710 	if (TREE_CODE (parent) == FUNCTION_DECL)
6711 	  TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6712     }
6713 
6714   TREE_ASM_WRITTEN (current_function_decl) = 1;
6715 
6716   /* After expanding, the return labels are no longer needed.  */
6717   return_label = NULL;
6718   naked_return_label = NULL;
6719 
6720   /* After expanding, the tm_restart map is no longer needed.  */
6721   if (fun->gimple_df->tm_restart)
6722     fun->gimple_df->tm_restart = NULL;
6723 
6724   /* Tag the blocks with a depth number so that change_scope can find
6725      the common parent easily.  */
6726   set_block_levels (DECL_INITIAL (fun->decl), 0);
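  /* No particular block is being expanded any more, so switch back to the
     default RTL profile.  */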
6727   default_rtl_profile ();
6728 
6729   /* For -dx, discard loops now; otherwise IL verification in clean_state
6730      will ICE.  */
6731   if (rtl_dump_and_exit)
6732     {
6733       cfun->curr_properties &= ~PROP_loops;
6734       loop_optimizer_finalize ();
6735     }
6736 
6737   timevar_pop (TV_POST_EXPAND);
6738 
6739   return 0;
6740 }
6741 
6742 } // anon namespace
6743 
6744 rtl_opt_pass *
6745 make_pass_expand (gcc::context *ctxt)
6746 {
6747   return new pass_expand (ctxt);
6748 }
6749