1 /* Liveness for SSA trees.
2    Copyright (C) 2003-2019 Free Software Foundation, Inc.
3    Contributed by Andrew MacLeod <amacleod@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "timevar.h"
29 #include "ssa.h"
30 #include "cgraph.h"
31 #include "gimple-pretty-print.h"
32 #include "diagnostic-core.h"
33 #include "gimple-iterator.h"
34 #include "tree-dfa.h"
35 #include "dumpfile.h"
36 #include "tree-ssa-live.h"
37 #include "debug.h"
38 #include "tree-ssa.h"
39 #include "ipa-utils.h"
40 #include "cfgloop.h"
41 #include "stringpool.h"
42 #include "attribs.h"
43 #include "optinfo.h"
44 
45 static void verify_live_on_entry (tree_live_info_p);
46 
47 
48 /* VARMAP maintains a mapping from SSA version number to real variables.
49 
50    All SSA_NAMES are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
52    ssa_names which occur in a copy or in a PHI node into the same partition.
53 
54    At the end of out-of-ssa, each partition becomes a "real" variable and is
55    rewritten as a compiler variable.
56 
57    The var_map data structure is used to manage these partitions.  It allows
58    partitions to be combined, and determines which partition belongs to what
59    ssa_name or variable, and vice versa.  */
60 
61 
62 /* Remove the base table in MAP.  */
63 
64 static void
var_map_base_fini(var_map map)65 var_map_base_fini (var_map map)
66 {
67   /* Free the basevar info if it is present.  */
68   if (map->partition_to_base_index != NULL)
69     {
70       free (map->partition_to_base_index);
71       map->partition_to_base_index = NULL;
72       map->num_basevars = 0;
73     }
74 }
75 /* Create a variable partition map of SIZE for region, initialize and return
76    it.  Region is a loop if LOOP is non-NULL, otherwise is the current
77    function.  */
78 
var_map
init_var_map (int size, struct loop *loop)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  /* Initially every ssa version is its own partition.  */
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  map->vec_bbs = vNULL;
  if (loop)
    {
      /* Loop region: record the loop body blocks both as a bitmap (for
	 fast membership tests) and as a vector (for ordered walks).  */
      map->bmp_bbs = BITMAP_ALLOC (NULL);
      map->outofssa_p = false;
      basic_block *bbs = get_loop_body_in_dom_order (loop);
      for (unsigned i = 0; i < loop->num_nodes; ++i)
	{
	  bitmap_set_bit (map->bmp_bbs, bbs[i]->index);
	  map->vec_bbs.safe_push (bbs[i]);
	}
      free (bbs);
    }
  else
    {
      /* Whole-function region (out-of-SSA): no membership bitmap is
	 needed; every block of the current function is in the region.  */
      map->bmp_bbs = NULL;
      map->outofssa_p = true;
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	map->vec_bbs.safe_push (bb);
    }
  return map;
}
116 
117 
118 /* Free memory associated with MAP.  */
119 
void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  /* The basic-block membership bitmap only exists for loop regions.  */
  if (map->bmp_bbs)
    BITMAP_FREE (map->bmp_bbs);
  map->vec_bbs.release ();
  free (map);
}
132 
133 
134 /* This function will combine the partitions in MAP for VAR1 and VAR2.  It
135    Returns the partition which represents the new partition.  If the two
136    partitions cannot be combined, NO_PARTITION is returned.  */
137 
int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view. If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  /* Already in the same partition; nothing to merge.  */
  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  /* Translate the result into the current view, if one is active.  */
  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}
166 
167 
168 /* Compress the partition numbers in MAP such that they fall in the range
169    0..(num_partitions-1) instead of wherever they turned out during
170    the partitioning exercise.  This removes any references to unused
171    partitions, thereby allowing bitmaps and other vectors to be much
172    denser.
173 
174    This is implemented such that compaction doesn't affect partitioning.
175    Ie., once partitions are created and possibly merged, running one
176    or more different kind of compaction will not affect the partitions
177    themselves.  Their index might change, but all the same variables will
178    still be members of the same partition group.  This allows work on reduced
179    sets, and no loss of information when a larger set is later desired.
180 
181    In particular, coalescing can work on partitions which have 2 or more
182    definitions, and then 'recompact' later to include all the single
183    definitions for assignment to program variables.  */
184 
185 
186 /* Set MAP back to the initial state of having no partition view.  Return a
187    bitmap which has a bit set for each partition number which is in use in the
188    varmap.  */
189 
static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  A partition is
     skipped when its representative name is gone, is a virtual operand,
     or is an unused default definition of an ordinary variable.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
	      || (SSA_NAME_VAR (ssa_name (tmp))
		  && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}
226 
227 
228 /* This routine will finalize the view data for MAP based on the partitions
229    set in SELECTED.  This is either the same bitmap returned from
230    partition_view_init, or a trimmed down version if some of those partitions
231    were not desired in this view.  SELECTED is freed before returning.  */
232 
static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      /* 0xff bytes make every entry -1, i.e. "not in this view".  */
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
265 
266 
267 /* Create a partition view which includes all the used partitions in MAP.  */
268 
269 void
partition_view_normal(var_map map)270 partition_view_normal (var_map map)
271 {
272   bitmap used;
273 
274   used = partition_view_init (map);
275   partition_view_fini (map, used);
276 
277   var_map_base_fini (map);
278 }
279 
280 
/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  */
284 
285 void
partition_view_bitmap(var_map map,bitmap only)286 partition_view_bitmap (var_map map, bitmap only)
287 {
288   bitmap used;
289   bitmap new_partitions = BITMAP_ALLOC (NULL);
290   unsigned x, p;
291   bitmap_iterator bi;
292 
293   used = partition_view_init (map);
294   EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
295     {
296       p = partition_find (map->var_partition, x);
297       gcc_assert (bitmap_bit_p (used, p));
298       bitmap_set_bit (new_partitions, p);
299     }
300   partition_view_fini (map, new_partitions);
301 
302   var_map_base_fini (map);
303 }
304 
305 
/* Bitmap of DECL_UIDs of variables referenced by the IL; allocated, filled
   and freed by remove_unused_locals, queried via set_is_used/is_used_p.  */
static bitmap usedvars;
307 
308 /* Mark VAR as used, so that it'll be preserved during rtl expansion.
309    Returns true if VAR wasn't marked before.  */
310 
311 static inline bool
set_is_used(tree var)312 set_is_used (tree var)
313 {
314   return bitmap_set_bit (usedvars, DECL_UID (var));
315 }
316 
317 /* Return true if VAR is marked as used.  */
318 
319 static inline bool
is_used_p(tree var)320 is_used_p (tree var)
321 {
322   return bitmap_bit_p (usedvars, DECL_UID (var));
323 }
324 
325 static inline void mark_all_vars_used (tree *);
326 
327 /* Helper function for mark_all_vars_used, called via walk_tree.  */
328 
static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  /* For an SSA name, the interesting entity is its underlying variable,
     if any; the name itself has no subtrees worth walking.  */
  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  /* An expression carrying a scope block keeps that block alive.  */
  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (VAR_P (t))
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  /* Types and decls have no subtrees we care about.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
383 
384 /* Mark the scope block SCOPE and its subblocks unused when they can be
385    possibly eliminated if dead.  */
386 
387 static void
mark_scope_block_unused(tree scope)388 mark_scope_block_unused (tree scope)
389 {
390   tree t;
391   TREE_USED (scope) = false;
392   if (!(*debug_hooks->ignore_block) (scope))
393     TREE_USED (scope) = true;
394   for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
395     mark_scope_block_unused (t);
396 }
397 
398 /* Look if the block is dead (by possibly eliminating its dead subblocks)
399    and return true if so.
400    Block is declared dead if:
401      1) No statements are associated with it.
402      2) Declares no live variables
403      3) All subblocks are dead
404 	or there is precisely one subblocks and the block
405 	has same abstract origin as outer block and declares
406 	no variables, so it is pure wrapper.
407    When we are not outputting full debug info, we also eliminate dead variables
408    out of scope blocks to let them to be recycled by GGC and to save copying work
409    done by the inliner.  */
410 
static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with block_ultimate_origin
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block)
    {
      tree fn = block_ultimate_origin (scope);
      if (fn && TREE_CODE (fn) == FUNCTION_DECL)
	{
	  in_ctor_dtor_block = false;
	  unused = false;
	}
    }

  /* Scan the variables declared directly in SCOPE.  T points at the
     chain link of the current decl, so "*t = DECL_CHAIN (*t)" unlinks
     it from the list in place.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of function it was nested into was eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may get end up with
	 different results.  */
      else if (VAR_P (*t) && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs.  non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  /* Recurse into subblocks; a dead subblock with live children is
     replaced by its own subblock chain (spliced into SCOPE).  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
	        BLOCK_SUPERCONTEXT (*t) = supercontext;
	        t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


   if (!unused)
     ;
   /* Outer scope is always used.  */
   else if (!BLOCK_SUPERCONTEXT (scope)
            || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
     unused = false;
   /* Innermost blocks with no live variables nor statements can be always
      eliminated.  */
   else if (!nsubblocks)
     ;
   /* When not generating debug info we can eliminate info on unused
      variables.  */
   else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE
	    && !optinfo_wants_inlining_info_p ())
     {
       /* Even for -g0 don't prune outer scopes from artificial
	  functions, otherwise diagnostics using tree_nonartificial_location
	  will not be emitted properly.  */
       if (inlined_function_outer_scope_p (scope))
	 {
	   tree ao = BLOCK_ORIGIN (scope);
	   if (ao
	       && TREE_CODE (ao) == FUNCTION_DECL
	       && DECL_DECLARED_INLINE_P (ao)
	       && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	     unused = false;
	 }
     }
   else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
     unused = false;
   /* See if this block is important for representation of inlined
      function.  Inlined functions are always represented by block
      with block_ultimate_origin being set to FUNCTION_DECL and
      DECL_SOURCE_LOCATION set, unless they expand to nothing...  */
   else if (inlined_function_outer_scope_p (scope))
     unused = false;
   else
   /* Verify that only blocks with source location set
      are entry points to the inlined functions.  */
     gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		 == UNKNOWN_LOCATION);

   TREE_USED (scope) = !unused;
   return unused;
}
589 
590 /* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
591    eliminated during the tree->rtl conversion process.  */
592 
static inline void
mark_all_vars_used (tree *expr_p)
{
  /* The walker callback does all the marking work.  */
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}
598 
599 /* Helper function for clear_unused_block_pointer, called via walk_tree.  */
600 
601 static tree
clear_unused_block_pointer_1(tree * tp,int *,void *)602 clear_unused_block_pointer_1 (tree *tp, int *, void *)
603 {
604   if (EXPR_P (*tp) && TREE_BLOCK (*tp)
605       && !TREE_USED (TREE_BLOCK (*tp)))
606     TREE_SET_BLOCK (*tp, NULL);
607   return NULL_TREE;
608 }
609 
610 /* Set all block pointer in debug or clobber stmt to NULL if the block
611    is unused, so that they will not be streamed out.  */
612 
static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple *stmt = gsi_stmt (gsi);

	/* Only debug and clobber stmts may keep dangling block refs.  */
	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	/* Also scrub blocks referenced from the stmt's operands.  */
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}
636 
637 /* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
638    indentation level and FLAGS is as in print_generic_expr.  */
639 
static void
dump_scope_block (FILE *file, int indent, tree scope, dump_flags_t flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  if (BLOCK_FRAGMENT_ORIGIN (scope))
    fprintf (file, " Fragment of : #%i",
	     BLOCK_NUMBER (BLOCK_FRAGMENT_ORIGIN (scope)));
  else if (BLOCK_FRAGMENT_CHAIN (scope))
    {
      fprintf (file, " Fragment chain :");
      for (t = BLOCK_FRAGMENT_CHAIN (scope); t ;
	   t = BLOCK_FRAGMENT_CHAIN (t))
	fprintf (file, " #%i", BLOCK_NUMBER (t));
    }
  fprintf (file, " \n");
  /* Variables declared directly in this block.  */
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  /* Variables moved here from eliminated inner blocks.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
      			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  /* Recurse into subblocks with increased indentation.  */
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
693 
694 /* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
695    is as in print_generic_expr.  */
696 
DEBUG_FUNCTION void
debug_scope_block (tree scope, dump_flags_t flags)
{
  /* Convenience wrapper writing to stderr for use from a debugger.  */
  dump_scope_block (stderr, 0, scope, flags);
}
702 
703 
704 /* Dump the tree of lexical scopes of current_function_decl to FILE.
705    FLAGS is as in print_generic_expr.  */
706 
void
dump_scope_blocks (FILE *file, dump_flags_t flags)
{
  /* DECL_INITIAL of the function decl is its outermost scope block.  */
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
712 
713 
714 /* Dump the tree of lexical scopes of current_function_decl to stderr.
715    FLAGS is as in print_generic_expr.  */
716 
DEBUG_FUNCTION void
debug_scope_blocks (dump_flags_t flags)
{
  /* Convenience wrapper writing to stderr for use from a debugger.  */
  dump_scope_blocks (stderr, flags);
}
722 
723 /* Remove local variables that are not referenced in the IL.  */
724 
void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  /* If we wanted to mark the block referenced by the inline
	     entry point marker as used, this would be a good spot to
	     do it.  If the block is not otherwise used, the stmt will
	     be cleaned up in clean_unused_block_pointer.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  /* Clobbers are dealt with in a second pass below.  */
	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      /* PHI results and arguments reference variables too.  */
      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
	  tree def;
	  gphi *phi = gpi.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
            }
        }

      /* Blocks referenced from edge goto locations stay used too.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((VAR_P (base) && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		/* A retained clobber keeps its scope block alive.  */
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  /* Drop simduids that refer to variables no longer used.  */
  if (cfun->has_simduid_loops)
    {
      struct loop *loop;
      FOR_EACH_LOOP (loop, 0)
	if (loop->simduid && !is_used_p (loop->simduid))
	  loop->simduid = NULL_TREE;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  The vector
     is compacted in place: SRCIDX reads, DSTIDX writes.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (VAR_P (var))
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (VAR_P (var) && DECL_HARD_REGISTER (var) && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  /* Finally prune the scope-block tree and dangling block pointers.  */
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl),
			       polymorphic_ctor_dtor_p (current_function_decl,
							true) != NULL_TREE);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}
911 
912 /* Allocate and return a new live range information object base on MAP.  */
913 
static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  /* Dedicated obstacks so all live-in/live-out bitmaps can be released
     at once in delete_tree_live_info.  */
  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);

  /* Vectors are indexed by basic-block index; only the blocks that
     belong to MAP's region get their bitmap heads initialized.  */
  live->livein = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  live->liveout = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  for (unsigned i = 0; map->vec_bbs.iterate (i, &bb); ++i)
    {
      bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
      bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
    }

  /* Worklist stack used during the live-on-entry computation.  */
  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}
941 
942 
943 /* Free storage for live range info object LIVE.  */
944 
945 void
delete_tree_live_info(tree_live_info_p live)946 delete_tree_live_info (tree_live_info_p live)
947 {
948   if (live->livein)
949     {
950       bitmap_obstack_release (&live->livein_obstack);
951       free (live->livein);
952     }
953   if (live->liveout)
954     {
955       bitmap_obstack_release (&live->liveout_obstack);
956       free (live->liveout);
957     }
958   BITMAP_FREE (live->global);
959   free (live->work_stack);
960   free (live);
961 }
962 
963 
964 /* Visit basic block BB and propagate any required live on entry bits from
965    LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
966    TMP is a temporary work bitmap which is passed in to avoid reallocating
967    it each time.  */
968 
969 static void
loe_visit_block(tree_live_info_p live,basic_block bb,sbitmap visited)970 loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
971 {
972   edge e;
973   bool change;
974   edge_iterator ei;
975   basic_block pred_bb;
976   bitmap loe;
977 
978   gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
979   bitmap_set_bit (visited, bb->index);
980 
981   loe = live_on_entry (live, bb);
982 
983   FOR_EACH_EDGE (e, ei, bb->preds)
984     {
985       pred_bb = e->src;
986       if (!region_contains_p (live->map, pred_bb))
987 	continue;
988       /* Variables live-on-entry from BB that aren't defined in the
989 	 predecessor block.  This should be the live on entry vars to pred.
990 	 Note that liveout is the DEFs in a block while live on entry is
991 	 being calculated.
992 	 Add these bits to live-on-entry for the pred. if there are any
993 	 changes, and pred_bb has been visited already, add it to the
994 	 revisit stack.  */
995       change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
996 					  loe, &live->liveout[pred_bb->index]);
997       if (change
998 	  && bitmap_bit_p (visited, pred_bb->index))
999 	{
1000 	  bitmap_clear_bit (visited, pred_bb->index);
1001 	  *(live->stack_top)++ = pred_bb->index;
1002 	}
1003     }
1004 }
1005 
1006 
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit region's blocks in reverse order and propagate live on entry values
     into the predecessors blocks.  Note I is unsigned: when it would drop
     below zero it wraps around, iterate () then fails and the loop ends.
     (An empty vec_bbs wraps immediately, so the loop body never runs.)  */
  for (unsigned i = live->map->vec_bbs.length () - 1;
       live->map->vec_bbs.iterate (i, &bb); --i)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  These are the
     block indices loe_visit_block pushed when an already-visited
     predecessor's live-on-entry set changed.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }
}
1032 
1033 
1034 /* Calculate the initial live on entry vector for SSA_NAME using immediate_use
1035    links.  Set the live on entry fields in LIVE.  Def's are marked temporarily
1036    in the liveout vector.  */
1037 
1038 static void
set_var_live_on_entry(tree ssa_name,tree_live_info_p live)1039 set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
1040 {
1041   int p;
1042   gimple *stmt;
1043   use_operand_p use;
1044   basic_block def_bb = NULL;
1045   imm_use_iterator imm_iter;
1046   bool global = false;
1047 
1048   p = var_to_partition (live->map, ssa_name);
1049   if (p == NO_PARTITION)
1050     return;
1051 
1052   stmt = SSA_NAME_DEF_STMT (ssa_name);
1053   if (stmt)
1054     {
1055       def_bb = gimple_bb (stmt);
1056       /* Mark defs in liveout bitmap temporarily.  */
1057       if (def_bb && region_contains_p (live->map, def_bb))
1058 	bitmap_set_bit (&live->liveout[def_bb->index], p);
1059     }
1060   else
1061     def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1062 
1063   /* An undefined local variable does not need to be very alive.  */
1064   if (ssa_undefined_value_p (ssa_name, false))
1065     return;
1066 
1067   /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
1068      add it to the list of live on entry blocks.  */
1069   FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
1070     {
1071       gimple *use_stmt = USE_STMT (use);
1072       basic_block add_block = NULL;
1073 
1074       if (gimple_code (use_stmt) == GIMPLE_PHI)
1075         {
1076 	  /* Uses in PHI's are considered to be live at exit of the SRC block
1077 	     as this is where a copy would be inserted.  Check to see if it is
1078 	     defined in that block, or whether its live on entry.  */
1079 	  int index = PHI_ARG_INDEX_FROM_USE (use);
1080 	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
1081 	  if (e->src != def_bb && region_contains_p (live->map, e->src))
1082 	    add_block = e->src;
1083 	}
1084       else if (is_gimple_debug (use_stmt))
1085 	continue;
1086       else
1087         {
1088 	  /* If its not defined in this block, its live on entry.  */
1089 	  basic_block use_bb = gimple_bb (use_stmt);
1090 	  if (use_bb != def_bb && region_contains_p (live->map, use_bb))
1091 	    add_block = use_bb;
1092 	}
1093 
1094       /* If there was a live on entry use, set the bit.  */
1095       if (add_block)
1096         {
1097 	  global = true;
1098 	  bitmap_set_bit (&live->livein[add_block->index], p);
1099 	}
1100     }
1101 
1102   /* If SSA_NAME is live on entry to at least one block, fill in all the live
1103      on entry blocks between the def and all the uses.  */
1104   if (global)
1105     bitmap_set_bit (live->global, p);
1106 }
1107 
1108 
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used the liveout vectors to hold each
     block's defs; clear that temporary use now.  */
  for (unsigned i = 0; liveinfo->map->vec_bbs.iterate (i, &bb); ++i)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  /* Skip virtual PHIs.  */
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      /* Non-SSA arguments (e.g. constants) have no partition.  */
	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      /* The argument is live out of the edge's source block, where
		 any copy for it would be inserted.  */
	      e = gimple_phi_arg_edge (phi, i);
	      if (region_contains_p (liveinfo->map, e->src))
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      if (!region_contains_p (liveinfo->map, bb))
	continue;

      /* Add each successor's live-on-entry set to this block's live on
	 exit set.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (region_contains_p (liveinfo->map, e->dest))
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}
1161 
1162 
1163 /* Given partition map MAP, calculate all the live on entry bitmaps for
1164    each partition.  Return a new live info object.  */
1165 
1166 tree_live_info_p
calculate_live_ranges(var_map map,bool want_livein)1167 calculate_live_ranges (var_map map, bool want_livein)
1168 {
1169   tree var;
1170   unsigned i;
1171   tree_live_info_p live;
1172 
1173   live = new_tree_live_info (map);
1174   for (i = 0; i < num_var_partitions (map); i++)
1175     {
1176       var = partition_to_var (map, i);
1177       if (var != NULL_TREE)
1178 	set_var_live_on_entry (var, live);
1179     }
1180 
1181   live_worklist (live);
1182 
1183   if (flag_checking)
1184     verify_live_on_entry (live);
1185 
1186   calculate_live_on_exit (live);
1187 
1188   if (!want_livein)
1189     {
1190       bitmap_obstack_release (&live->livein_obstack);
1191       free (live->livein);
1192       live->livein = NULL;
1193     }
1194 
1195   return live;
1196 }
1197 
1198 
1199 /* Output partition map MAP to file F.  */
1200 
1201 void
dump_var_map(FILE * f,var_map map)1202 dump_var_map (FILE *f, var_map map)
1203 {
1204   int t;
1205   unsigned x, y;
1206   int p;
1207 
1208   fprintf (f, "\nPartition map \n\n");
1209 
1210   for (x = 0; x < map->num_partitions; x++)
1211     {
1212       if (map->view_to_partition != NULL)
1213 	p = map->view_to_partition[x];
1214       else
1215 	p = x;
1216 
1217       if (ssa_name (p) == NULL_TREE
1218 	  || virtual_operand_p (ssa_name (p)))
1219         continue;
1220 
1221       t = 0;
1222       for (y = 1; y < num_ssa_names; y++)
1223         {
1224 	  p = partition_find (map->var_partition, y);
1225 	  if (map->partition_to_view)
1226 	    p = map->partition_to_view[p];
1227 	  if (p == (int)x)
1228 	    {
1229 	      if (t++ == 0)
1230 	        {
1231 		  fprintf (f, "Partition %d (", x);
1232 		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1233 		  fprintf (f, " - ");
1234 		}
1235 	      fprintf (f, "%d ", y);
1236 	    }
1237 	}
1238       if (t != 0)
1239 	fprintf (f, ")\n");
1240     }
1241   fprintf (f, "\n");
1242 }
1243 
1244 
1245 /* Generic dump for the above.  */
1246 
1247 DEBUG_FUNCTION void
debug(_var_map & ref)1248 debug (_var_map &ref)
1249 {
1250   dump_var_map (stderr, &ref);
1251 }
1252 
1253 DEBUG_FUNCTION void
debug(_var_map * ptr)1254 debug (_var_map *ptr)
1255 {
1256   if (ptr)
1257     debug (*ptr);
1258   else
1259     fprintf (stderr, "<nil>\n");
1260 }
1261 
1262 
1263 /* Output live range info LIVE to file F, controlled by FLAG.  */
1264 
1265 void
dump_live_info(FILE * f,tree_live_info_p live,int flag)1266 dump_live_info (FILE *f, tree_live_info_p live, int flag)
1267 {
1268   basic_block bb;
1269   unsigned i;
1270   var_map map = live->map;
1271   bitmap_iterator bi;
1272 
1273   if ((flag & LIVEDUMP_ENTRY) && live->livein)
1274     {
1275       FOR_EACH_BB_FN (bb, cfun)
1276 	{
1277 	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
1278 	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
1279 	    {
1280 	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1281 	      fprintf (f, "  ");
1282 	    }
1283 	  fprintf (f, "\n");
1284 	}
1285     }
1286 
1287   if ((flag & LIVEDUMP_EXIT) && live->liveout)
1288     {
1289       FOR_EACH_BB_FN (bb, cfun)
1290 	{
1291 	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
1292 	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
1293 	    {
1294 	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1295 	      fprintf (f, "  ");
1296 	    }
1297 	  fprintf (f, "\n");
1298 	}
1299     }
1300 }
1301 
1302 
1303 /* Generic dump for the above.  */
1304 
1305 DEBUG_FUNCTION void
debug(tree_live_info_d & ref)1306 debug (tree_live_info_d &ref)
1307 {
1308   dump_live_info (stderr, &ref, 0);
1309 }
1310 
1311 DEBUG_FUNCTION void
debug(tree_live_info_d * ptr)1312 debug (tree_live_info_d *ptr)
1313 {
1314   if (ptr)
1315     debug (*ptr);
1316   else
1317     fprintf (stderr, "<nil>\n");
1318 }
1319 
1320 
/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;	/* Count of inconsistencies found; must end up zero.  */
  edge_iterator ei;
  var_map map = live->map;

   /* Check for live on entry partitions and report those with a DEF in
      the program. This will typically mean an optimization has done
      something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (!region_contains_p (live->map, e->dest))
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  /* TMP may be NULL, e.g. when STMT is a nop (default def).  */
	  tmp = gimple_bb (stmt);
	  /* D is the default def for VAR's underlying decl, if any.  */
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* Live on entry to the function's first real block: only a
		 default def (a nop def stmt) may legitimately be so.  */
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
	        {
		  /* A nop-defined name live on entry must be the actual
		     default def of its decl.  */
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
		        {
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* An undefined local variable does not need to be very
		   alive.  */
		if (ssa_undefined_value_p (var, false))
		  continue;

		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gphi_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gphi *phi = gsi.phi ();
		    if (virtual_operand_p (gimple_phi_result (phi)))
		      continue;
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		/* Expand adds unused default defs for PARM_DECLs and
		   RESULT_DECLs.  They're ok.  */
		if (has_zero_uses (var)
		    && SSA_NAME_VAR (var)
		    && !VAR_P (SSA_NAME_VAR (var)))
		  continue;
	        num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  /* NUM only ever increases, so any diagnostic above trips this assert.  */
  gcc_assert (num <= 0);
}
1436