1 /* Scanning of rtl for dataflow analysis.
2    Copyright (C) 1999-2016 Free Software Foundation, Inc.
3    Originally contributed by Michael P. Hayes
4              (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
5    Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
6              and Kenneth Zadeck (zadeck@naturalbridge.com).
7 
8 This file is part of GCC.
9 
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14 
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18 for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "df.h"
32 #include "tm_p.h"
33 #include "regs.h"
34 #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
35 #include "dumpfile.h"
36 
37 
38 /* The set of hard registers in eliminables[i].from. */
39 
40 static HARD_REG_SET elim_reg_set;
41 
/* A collection of refs gathered while scanning a single insn or basic
   block, before they are installed into the permanent chains.  */
44 
struct df_collection_rec
{
  /* Defs found while scanning.  */
  auto_vec<df_ref, 128> def_vec;
  /* Uses found while scanning.  */
  auto_vec<df_ref, 32> use_vec;
  /* Uses that come from REG_EQUAL/REG_EQUIV notes (DF_REF_IN_NOTE).  */
  auto_vec<df_ref, 32> eq_use_vec;
  /* Multiword hard register groups found while scanning.  */
  auto_vec<df_mw_hardreg *, 32> mw_vec;
};
52 
53 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
54 			   rtx, rtx *,
55 			   basic_block, struct df_insn_info *,
56 			   enum df_ref_type, int ref_flags);
57 static void df_def_record_1 (struct df_collection_rec *, rtx *,
58 			     basic_block, struct df_insn_info *,
59 			     int ref_flags);
60 static void df_defs_record (struct df_collection_rec *, rtx,
61 			    basic_block, struct df_insn_info *,
62 			    int ref_flags);
63 static void df_uses_record (struct df_collection_rec *,
64 			    rtx *, enum df_ref_type,
65 			    basic_block, struct df_insn_info *,
66 			    int ref_flags);
67 
68 static void df_install_ref_incremental (df_ref);
69 static void df_insn_refs_collect (struct df_collection_rec*,
70 				  basic_block, struct df_insn_info *);
71 static void df_canonize_collection_rec (struct df_collection_rec *);
72 
73 static void df_get_regular_block_artificial_uses (bitmap);
74 static void df_get_eh_block_artificial_uses (bitmap);
75 
76 static void df_record_entry_block_defs (bitmap);
77 static void df_record_exit_block_uses (bitmap);
78 static void df_get_exit_block_use_set (bitmap);
79 static void df_get_entry_block_def_set (bitmap);
80 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
81 static void df_ref_chain_delete_du_chain (df_ref);
82 static void df_ref_chain_delete (df_ref);
83 
84 static void df_refs_add_to_chains (struct df_collection_rec *,
85 				   basic_block, rtx_insn *, unsigned int);
86 
87 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
88 				 rtx_insn *, bool);
89 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
90 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
91 static void df_install_ref (df_ref, struct df_reg_info *,
92 			    struct df_ref_info *, bool);
93 
94 static int df_ref_compare (df_ref, df_ref);
95 static int df_ref_ptr_compare (const void *, const void *);
96 static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
97 static int df_mw_ptr_compare (const void *, const void *);
98 
99 static void df_insn_info_delete (unsigned int);
100 
101 /* Indexed by hardware reg number, is true if that register is ever
102    used in the current function.
103 
104    In df-scan.c, this is set up to record the hard regs used
105    explicitly.  Reload adds in the hard regs used for holding pseudo
106    regs.  Final uses it to generate the code in the function prologue
107    and epilogue to save and restore registers as needed.  */
108 
109 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
110 
111 /* Flags used to tell df_refs_add_to_chains() which vectors it should copy. */
112 static const unsigned int copy_defs = 0x1;
113 static const unsigned int copy_uses = 0x2;
114 static const unsigned int copy_eq_uses = 0x4;
115 static const unsigned int copy_mw = 0x8;
116 static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
117 | copy_mw;
118 
119 /*----------------------------------------------------------------------------
120    SCANNING DATAFLOW PROBLEM
121 
122    There are several ways in which scanning looks just like the other
123    dataflow problems.  It shares the all the mechanisms for local info
124    as well as basic block info.  Where it differs is when and how often
125    it gets run.  It also has no need for the iterative solver.
126 ----------------------------------------------------------------------------*/
127 
/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  /* Allocation pools, one per df_ref class (see DF_REF_CLASS), plus
     pools for insn records, per-register records and multiword
     hard register records.  */
  object_allocator<df_base_ref> *ref_base_pool;
  object_allocator<df_artificial_ref> *ref_artificial_pool;
  object_allocator<df_regular_ref> *ref_regular_pool;
  object_allocator<df_insn_info> *insn_pool;
  object_allocator<df_reg_info> *reg_pool;
  object_allocator<df_mw_hardreg> *mw_reg_pool;

  /* Obstacks backing the register-indexed and insn-indexed bitmaps
     initialized in df_scan_alloc.  */
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};
141 
/* Internal function to shut down the scanning problem.  Releases every
   table, chain and pool owned by the scan problem, leaving the global
   df state zeroed so that df_scan_alloc can rebuild it.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Release the flat def and use tables and their bookkeeping.  */
  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  /* Release the per-register record arrays.  The df_reg_info records
     they point at come from reg_pool, which is deleted wholesale
     below.  */
  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  /* Likewise the insn records come from insn_pool.  */
  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  /* The bitmaps live on the obstacks released below; clearing them
     here leaves them in a reusable, empty state.  */
  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Deleting the pools frees every ref, insn and reg record at once.  */
  delete problem_data->ref_base_pool;
  delete problem_data->ref_artificial_pool;
  delete problem_data->ref_regular_pool;
  delete problem_data->insn_pool;
  delete problem_data->reg_pool;
  delete problem_data->mw_reg_pool;
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
195 
196 
/* Free basic block info.  BB is the block; VBB_INFO is its
   df_scan_bb_info record.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  rtx_insn *insn;

  /* Delete the records of all real insns in the block first.  */
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_insn_info_delete (INSN_UID (insn));

  /* The block_info array may have been reallocated since VBB_INFO was
     captured; re-fetch the current entry when the index is in range.  */
  if (bb_index < df_scan->block_info_size)
    bb_info = df_scan_get_bb_info (bb_index);

  /* Get rid of any artificial uses or defs.  */
  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
  df_ref_chain_delete (bb_info->artificial_defs);
  df_ref_chain_delete (bb_info->artificial_uses);
  bb_info->artificial_defs = NULL;
  bb_info->artificial_uses = NULL;
}
221 
222 
223 /* Allocate the problem data for the scanning problem.  This should be
224    called when the problem is created or when the entire function is to
225    be rescanned.  */
226 void
df_scan_alloc(bitmap all_blocks ATTRIBUTE_UNUSED)227 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
228 {
229   struct df_scan_problem_data *problem_data;
230   unsigned int insn_num = get_max_uid () + 1;
231   basic_block bb;
232 
233   /* Given the number of pools, this is really faster than tearing
234      everything apart.  */
235   if (df_scan->problem_data)
236     df_scan_free_internal ();
237 
238   problem_data = XNEW (struct df_scan_problem_data);
239   df_scan->problem_data = problem_data;
240   df_scan->computed = true;
241 
242   problem_data->ref_base_pool = new object_allocator<df_base_ref>
243     ("df_scan ref base");
244   problem_data->ref_artificial_pool = new object_allocator<df_artificial_ref>
245     ("df_scan ref artificial");
246   problem_data->ref_regular_pool = new object_allocator<df_regular_ref>
247     ("df_scan ref regular");
248   problem_data->insn_pool = new object_allocator<df_insn_info>
249     ("df_scan insn");
250   problem_data->reg_pool = new object_allocator<df_reg_info>
251     ("df_scan reg");
252   problem_data->mw_reg_pool = new object_allocator<df_mw_hardreg>
253     ("df_scan mw_reg");
254 
255   bitmap_obstack_initialize (&problem_data->reg_bitmaps);
256   bitmap_obstack_initialize (&problem_data->insn_bitmaps);
257 
258   insn_num += insn_num / 4;
259   df_grow_reg_info ();
260 
261   df_grow_insn_info ();
262   df_grow_bb_info (df_scan);
263 
264   FOR_ALL_BB_FN (bb, cfun)
265     {
266       unsigned int bb_index = bb->index;
267       struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
268       bb_info->artificial_defs = NULL;
269       bb_info->artificial_uses = NULL;
270     }
271 
272   bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
273   bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
274   bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
275   df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
276   df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
277   bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
278   bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
279   bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
280   df_scan->optional_p = false;
281 }
282 
283 
284 /* Free all of the data associated with the scan problem.  */
285 
286 static void
df_scan_free(void)287 df_scan_free (void)
288 {
289   if (df_scan->problem_data)
290     df_scan_free_internal ();
291 
292   if (df->blocks_to_analyze)
293     {
294       BITMAP_FREE (df->blocks_to_analyze);
295       df->blocks_to_analyze = NULL;
296     }
297 
298   free (df_scan);
299 }
300 
/* Dump the preamble for DF_SCAN dump to FILE: the global register
   sets, a per-register ref breakdown, and overall ref/insn totals. */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;   /* Total defs seen.  */
  int ucount = 0;   /* Total regular uses seen.  */
  int ecount = 0;   /* Total REG_EQUAL/REG_EQUIV note uses seen.  */
  int icount = 0;   /* Non-call insns.  */
  int ccount = 0;   /* Call insns.  */
  basic_block bb;
  rtx_insn *insn;

  /* The global register sets maintained by the scanner.  */
  fprintf (file, ";;  invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";;  hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";;  regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";;  eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";;  entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";;  exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";;  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d [%s]", i, reg_names[i]);
  fprintf (file, "\n;;  ref usage \t");

  /* Emit "r<N>={<d>d,<u>u,<e>e}" for every register with at least one
     recorded ref, accumulating the totals as we go.  */
  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
	const char * sep = "";

	fprintf (file, "r%d={", i);
	if (DF_REG_DEF_COUNT (i))
	  {
	    fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
	    sep = ",";
	    dcount += DF_REG_DEF_COUNT (i);
	  }
	if (DF_REG_USE_COUNT (i))
	  {
	    fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
	    sep = ",";
	    ucount += DF_REG_USE_COUNT (i);
	  }
	if (DF_REG_EQ_USE_COUNT (i))
	  {
	    fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
	    ecount += DF_REG_EQ_USE_COUNT (i);
	  }
	fprintf (file, "} ");
      }

  /* Count call and non-call insns to put the totals in context.  */
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    ccount++;
	  else
	    icount++;
	}

  fprintf (file, "\n;;    total ref usage %d{%dd,%du,%de}"
		 " in %d{%d regular + %d call} insns.\n",
		 dcount + ucount + ecount, dcount, ucount, ecount,
		 icount + ccount, icount, ccount);
}
373 
/* Dump the bb_info (the artificial def and use chains) for basic
   block BB to FILE. */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
  /* Disabled: per-insn debug dump, occasionally useful when hacking
     on the scanner itself.  */
#if 0
  {
    rtx_insn *insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	df_insn_debug (insn, false, file);
  }
#endif
}
398 
/* Problem definition for the scanning pass.  Only the allocation,
   free and dump hooks are provided; scanning needs no iterative
   solution, so all solver-related hooks are NULL.  */
static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Debugging start insn.  */
  NULL,                       /* Debugging end insn.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  sizeof (struct df_scan_bb_info),/* Size of entry of block_info array.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};
427 
428 
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
438 
439 
440 /*----------------------------------------------------------------------------
441    Storage Allocation Utilities
442 ----------------------------------------------------------------------------*/
443 
444 
/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  /* Step one: grow the register-indexed arrays with 25% headroom so
     repeated small growth doesn't reallocate every time.  */
  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
				    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  /* Step two: populate the newly exposed slots with zeroed
     df_reg_info records from the pool.  */
  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      /* Allocate and zero the def, use and eq_use records for
	 register I.  */
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
497 
498 
499 /* Grow the ref information.  */
500 
501 static void
df_grow_ref_info(struct df_ref_info * ref_info,unsigned int new_size)502 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
503 {
504   if (ref_info->refs_size < new_size)
505     {
506       ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
507       memset (ref_info->refs + ref_info->refs_size, 0,
508 	      (new_size - ref_info->refs_size) *sizeof (df_ref));
509       ref_info->refs_size = new_size;
510     }
511 }
512 
513 
514 /* Check and grow the ref information if necessary.  This routine
515    guarantees total_size + BITMAP_ADDEND amount of entries in refs
516    array.  It updates ref_info->refs_size only and does not change
517    ref_info->total_size.  */
518 
519 static void
df_check_and_grow_ref_info(struct df_ref_info * ref_info,unsigned bitmap_addend)520 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
521 			    unsigned bitmap_addend)
522 {
523   if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
524     {
525       int new_size = ref_info->total_size + bitmap_addend;
526       new_size += ref_info->total_size / 4;
527       df_grow_ref_info (ref_info, new_size);
528     }
529 }
530 
531 
/* Grow the insn_info array.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      /* DF_INSN_SIZE () and df->insns_size name the same counter; the
	 memset below relies on that to zero exactly the new tail.  */
      memset (df->insns + df->insns_size, 0,
	      (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}
549 
550 
551 
552 
553 /*----------------------------------------------------------------------------
554    PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
555 ----------------------------------------------------------------------------*/
556 
/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  /* A full rescan rebuilds the chains from scratch, so the flat ref
     tables are not maintained.  */
  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  /* Compute the implicit uses present in every regular and every EH
     block; the EH set also includes the regular set.  */
  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  bitmap_ior_into (&df->eh_block_artificial_uses,
		   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
589 
/* Create new refs under address LOC within INSN.  This function is
   only used externally.  REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
   depending on whether LOC is inside PATTERN (INSN) or a note.  */

void
df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
{
  /* Callers may only pass the note flag; everything else is computed
     during recording.  */
  gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
  df_uses_record (NULL, loc, DF_REF_REG_USE,
                  BLOCK_FOR_INSN (insn),
                  DF_INSN_INFO_GET (insn),
                  ref_flags);
}
603 
/* Incrementally install REF, a newly created ref for an insn that
   already has records: link it into the matching per-register info,
   optionally the flat ref table, and the insn's sorted ref list, then
   mark the block dirty.  */
static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_ptr;
  bool add_to_table;

  rtx_insn *insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  /* Pick the register-info array, the flat table and the insn chain
     that match REF's kind: def, note (eq) use, or plain use.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_EQ_USES (insn);
      /* Note uses only belong in the table when the current order
	 includes notes.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  add_to_table = true;
	  break;
	default:
	  add_to_table = false;
	  break;
	}
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  /* Appending out of order invalidates any sorted table order; demote
     it to the corresponding unordered variant.  */
  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	break;
      default:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	break;
      }

  /* Splice REF into the insn's list at its df_ref_compare-sorted
     position.  */
  while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);

  DF_REF_NEXT_LOC (ref) = *ref_ptr;
  *ref_ptr = ref;

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}
685 
686 
687 
688 /*----------------------------------------------------------------------------
689    UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
690 ----------------------------------------------------------------------------*/
691 
692 static void
df_free_ref(df_ref ref)693 df_free_ref (df_ref ref)
694 {
695   struct df_scan_problem_data *problem_data
696     = (struct df_scan_problem_data *) df_scan->problem_data;
697 
698   switch (DF_REF_CLASS (ref))
699     {
700     case DF_REF_BASE:
701       problem_data->ref_base_pool->remove ((df_base_ref *) (ref));
702       break;
703 
704     case DF_REF_ARTIFICIAL:
705       problem_data->ref_artificial_pool->remove
706 	((df_artificial_ref *) (ref));
707       break;
708 
709     case DF_REF_REGULAR:
710       problem_data->ref_regular_pool->remove
711 	((df_regular_ref *) (ref));
712       break;
713     }
714 }
715 
716 
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  /* Pick the per-register record, and the flat table (if REF kind is
     represented in it) from which REF must be removed.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	{
	  reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
	  /* Note uses only appear in the table when the current order
	     includes notes.  */
	  switch (df->use_info.ref_order)
	    {
	    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	    case DF_REF_ORDER_BY_REG_WITH_NOTES:
	    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	      refs = df->use_info.refs;
	      break;
	    default:
	      break;
	    }
	}
      else
	{
	  reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
	  refs = df->use_info.refs;
	}
    }

  /* NULL out REF's slot in the flat table.  Under analyze_subset only
     refs in the analyzed blocks ever got a table slot.  */
  if (refs)
    {
      if (df->analyze_subset)
	{
	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
	    refs[id] = NULL;
	}
      else
	refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here. It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}
798 
/* Initialize INSN_INFO to describe INSN: zero every field, then
   record the back-pointer to the insn itself.  */

static void
df_insn_info_init_fields (df_insn_info *insn_info, rtx_insn *insn)
{
  memset (insn_info, 0, sizeof (struct df_insn_info));
  insn_info->insn = insn;
}
807 
808 /* Create the insn record for INSN.  If there was one there, zero it
809    out.  */
810 
811 struct df_insn_info *
df_insn_create_insn_record(rtx_insn * insn)812 df_insn_create_insn_record (rtx_insn *insn)
813 {
814   struct df_scan_problem_data *problem_data
815     = (struct df_scan_problem_data *) df_scan->problem_data;
816   struct df_insn_info *insn_rec;
817 
818   df_grow_insn_info ();
819   insn_rec = DF_INSN_INFO_GET (insn);
820   if (!insn_rec)
821     {
822       insn_rec = problem_data->insn_pool->allocate ();
823       DF_INSN_INFO_SET (insn, insn_rec);
824     }
825   df_insn_info_init_fields (insn_rec, insn);
826   return insn_rec;
827 }
828 
829 
830 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain.  */
831 
832 static void
df_ref_chain_delete_du_chain(df_ref ref)833 df_ref_chain_delete_du_chain (df_ref ref)
834 {
835   for (; ref; ref = DF_REF_NEXT_LOC (ref))
836     /* CHAIN is allocated by DF_CHAIN. So make sure to
837        pass df_scan instance for the problem.  */
838     if (DF_REF_CHAIN (ref))
839       df_chain_unlink (ref);
840 }
841 
842 
843 /* Delete all refs in the ref chain.  */
844 
845 static void
df_ref_chain_delete(df_ref ref)846 df_ref_chain_delete (df_ref ref)
847 {
848   df_ref next;
849   for (; ref; ref = next)
850     {
851       next = DF_REF_NEXT_LOC (ref);
852       df_reg_chain_unlink (ref);
853     }
854 }
855 
856 
857 /* Delete the hardreg chain.  */
858 
859 static void
df_mw_hardreg_chain_delete(struct df_mw_hardreg * hardregs)860 df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
861 {
862   struct df_scan_problem_data *problem_data
863     = (struct df_scan_problem_data *) df_scan->problem_data;
864   df_mw_hardreg *next;
865 
866   for (; hardregs; hardregs = next)
867     {
868       next = DF_MWS_NEXT (hardregs);
869       problem_data->mw_reg_pool->remove (hardregs);
870     }
871 }
872 
/* Remove the contents of INSN_INFO (but don't free INSN_INFO itself).  */

static void
df_insn_info_free_fields (df_insn_info *insn_info)
{
  /* In general, notes do not have the insn_info fields
     initialized.  However, combine deletes insns by changing them
     to notes.  How clever.  So we cannot just check if it is a
     valid insn before short circuiting this code, we need to see
     if we actually initialized it.  */
  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  /* The du/ud chains must be unlinked before the refs themselves are
     freed below.  */
  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);
}
896 
897 /* Delete all of the refs information from the insn with UID.
898    Internal helper for df_insn_delete, df_insn_rescan, and other
899    df-scan routines that don't have to work in deferred mode
900    and do not have to mark basic blocks for re-processing.  */
901 
static void
df_insn_info_delete (unsigned int uid)
{
  struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* UID no longer has any deferred action pending.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;

      /* Release the refs, return the record to the pool, and clear
	 the uid -> insn_info mapping so later lookups see NULL.  */
      df_insn_info_free_fields (insn_info);
      problem_data->insn_pool->remove (insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
920 
921 /* Delete all of the refs information from INSN, either right now
922    or marked for later in deferred mode.  */
923 
void
df_insn_delete (rtx_insn *insn)
{
  unsigned int uid;
  basic_block bb;

  gcc_checking_assert (INSN_P (insn));

  /* Nothing to do if dataflow info has not been set up.  */
  if (!df)
    return;

  uid = INSN_UID (insn);
  bb = BLOCK_FOR_INSN (insn);

  /* ??? bb can be NULL after pass_free_cfg.  At that point, DF should
     not exist anymore (as mentioned in df-core.c: "The only requirement
     [for DF] is that there be a correct control flow graph."  Clearly
     that isn't the case after pass_free_cfg.  But DF is freed much later
     because some back-ends want to use DF info even though the CFG is
     already gone.  It's not clear to me whether that is safe, actually.
     In any case, we expect BB to be non-NULL at least up to register
     allocation, so disallow a non-NULL BB up to there.  Not perfect
     but better than nothing...  */
  gcc_checking_assert (bb != NULL || reload_completed);

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.
     DEBUG_INSNs do not make a block's data flow solution dirty (at
     worst the LUIDs are no longer contiguous).  */
  if (bb != NULL && NONDEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	{
	  /* A pending rescan/notes-rescan is superseded by deletion.  */
	  bitmap_clear_bit (&df->insns_to_rescan, uid);
	  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
	  bitmap_set_bit (&df->insns_to_delete, uid);
	}
      if (dump_file)
	fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  /* Immediate mode: drop the refs and the insn_info record now.  */
  df_insn_info_delete (uid);
}
980 
981 
982 /* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */
983 
984 static void
df_free_collection_rec(struct df_collection_rec * collection_rec)985 df_free_collection_rec (struct df_collection_rec *collection_rec)
986 {
987   unsigned int ix;
988   struct df_scan_problem_data *problem_data
989     = (struct df_scan_problem_data *) df_scan->problem_data;
990   df_ref ref;
991   struct df_mw_hardreg *mw;
992 
993   FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
994     df_free_ref (ref);
995   FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
996     df_free_ref (ref);
997   FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
998     df_free_ref (ref);
999   FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
1000     problem_data->mw_reg_pool->remove (mw);
1001 
1002   collection_rec->def_vec.release ();
1003   collection_rec->use_vec.release ();
1004   collection_rec->eq_use_vec.release ();
1005   collection_rec->mw_vec.release ();
1006 }
1007 
1008 /* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */
1009 
bool
df_insn_rescan (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  /* Bail out if DF is not initialized or INSN is not a real insn.  */
  if ((!df) || (!INSN_P (insn)))
    return false;

  /* An insn outside any block cannot be scanned.  */
  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  /* Create an empty record now so the insn can be queued;
	     the real scan happens in df_process_deferred_rescans.  */
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      /* A queued rescan supersedes any queued deletion/notes-rescan.  */
      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  /* Immediate mode: the insn is being handled now, so drop any
     deferred work queued for it.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      /* df_insn_refs_verify collects the fresh refs into
	 COLLECTION_REC and compares them against the existing ones.  */
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false. */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
	 Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_info_free_fields (insn_info);
      df_insn_info_init_fields (insn_info, insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      /* Insn was never scanned: build a record and collect its refs.  */
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  /* Install the collected refs; debug insns never dirty the block.  */
  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  return true;
}
1096 
1097 /* Same as df_insn_rescan, but don't mark the basic block as
1098    dirty.  */
1099 
bool
df_insn_rescan_debug_internal (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  /* Only reset debug insns whose location is already unknown.  */
  gcc_assert (DEBUG_INSN_P (insn)
	      && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  /* No deferred action remains relevant for this insn.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  /* Nothing to free: report that no change was made.  */
  if (insn_info->defs == 0
      && insn_info->uses == 0
      && insn_info->eq_uses == 0
      && insn_info->mw_hardregs == 0)
    return false;

  /* Same teardown as df_insn_info_free_fields, but the fields are
     explicitly zeroed afterwards since the record is kept.  */
  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  insn_info->defs = 0;
  insn_info->uses = 0;
  insn_info->eq_uses = 0;
  insn_info->mw_hardregs = 0;

  return true;
}
1149 
1150 
1151 /* Rescan all of the insns in the function.  Note that the artificial
1152    uses and defs are not touched.  This function will destroy def-use
1153    or use-def chains.  */
1154 
1155 void
df_insn_rescan_all(void)1156 df_insn_rescan_all (void)
1157 {
1158   bool no_insn_rescan = false;
1159   bool defer_insn_rescan = false;
1160   basic_block bb;
1161   bitmap_iterator bi;
1162   unsigned int uid;
1163   bitmap_head tmp;
1164 
1165   bitmap_initialize (&tmp, &df_bitmap_obstack);
1166 
1167   if (df->changeable_flags & DF_NO_INSN_RESCAN)
1168     {
1169       df_clear_flags (DF_NO_INSN_RESCAN);
1170       no_insn_rescan = true;
1171     }
1172 
1173   if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1174     {
1175       df_clear_flags (DF_DEFER_INSN_RESCAN);
1176       defer_insn_rescan = true;
1177     }
1178 
1179   bitmap_copy (&tmp, &df->insns_to_delete);
1180   EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1181     {
1182       struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1183       if (insn_info)
1184 	df_insn_info_delete (uid);
1185     }
1186 
1187   bitmap_clear (&tmp);
1188   bitmap_clear (&df->insns_to_delete);
1189   bitmap_clear (&df->insns_to_rescan);
1190   bitmap_clear (&df->insns_to_notes_rescan);
1191 
1192   FOR_EACH_BB_FN (bb, cfun)
1193     {
1194       rtx_insn *insn;
1195       FOR_BB_INSNS (bb, insn)
1196 	{
1197 	  df_insn_rescan (insn);
1198 	}
1199     }
1200 
1201   if (no_insn_rescan)
1202     df_set_flags (DF_NO_INSN_RESCAN);
1203   if (defer_insn_rescan)
1204     df_set_flags (DF_DEFER_INSN_RESCAN);
1205 }
1206 
1207 
1208 /* Process all of the deferred rescans or deletions.  */
1209 
void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  /* Temporarily clear the rescan-suppression flags so the calls below
     take effect; they are restored before returning.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Deletions first; iterate over copies because the workers modify
     the underlying bitmaps.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_info_delete (uid);
    }

  /* Then full rescans.  */
  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_rescan (insn_info->insn);
    }

  /* Finally the REG_EQUIV/REG_EQUAL note rescans.  */
  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
1281 
1282 
1283 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1284    the uses if INCLUDE_USES. Include the eq_uses if
1285    INCLUDE_EQ_USES.  */
1286 
1287 static unsigned int
df_count_refs(bool include_defs,bool include_uses,bool include_eq_uses)1288 df_count_refs (bool include_defs, bool include_uses,
1289 	       bool include_eq_uses)
1290 {
1291   unsigned int regno;
1292   int size = 0;
1293   unsigned int m = df->regs_inited;
1294 
1295   for (regno = 0; regno < m; regno++)
1296     {
1297       if (include_defs)
1298 	size += DF_REG_DEF_COUNT (regno);
1299       if (include_uses)
1300 	size += DF_REG_USE_COUNT (regno);
1301       if (include_eq_uses)
1302 	size += DF_REG_EQ_USE_COUNT (regno);
1303     }
1304   return size;
1305 }
1306 
1307 
1308 /* Take build ref table for either the uses or defs from the reg-use
1309    or reg-def chains.  This version processes the refs in reg order
1310    which is likely to be best if processing the whole function.  */
1311 
1312 static void
df_reorganize_refs_by_reg_by_reg(struct df_ref_info * ref_info,bool include_defs,bool include_uses,bool include_eq_uses)1313 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1314 				  bool include_defs,
1315 				  bool include_uses,
1316 				  bool include_eq_uses)
1317 {
1318   unsigned int m = df->regs_inited;
1319   unsigned int regno;
1320   unsigned int offset = 0;
1321   unsigned int start;
1322 
1323   if (df->changeable_flags & DF_NO_HARD_REGS)
1324     {
1325       start = FIRST_PSEUDO_REGISTER;
1326       memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1327       memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1328     }
1329   else
1330     start = 0;
1331 
1332   ref_info->total_size
1333     = df_count_refs (include_defs, include_uses, include_eq_uses);
1334 
1335   df_check_and_grow_ref_info (ref_info, 1);
1336 
1337   for (regno = start; regno < m; regno++)
1338     {
1339       int count = 0;
1340       ref_info->begin[regno] = offset;
1341       if (include_defs)
1342 	{
1343 	  df_ref ref = DF_REG_DEF_CHAIN (regno);
1344 	  while (ref)
1345 	    {
1346 	      ref_info->refs[offset] = ref;
1347 	      DF_REF_ID (ref) = offset++;
1348 	      count++;
1349 	      ref = DF_REF_NEXT_REG (ref);
1350 	      gcc_checking_assert (offset < ref_info->refs_size);
1351 	    }
1352 	}
1353       if (include_uses)
1354 	{
1355 	  df_ref ref = DF_REG_USE_CHAIN (regno);
1356 	  while (ref)
1357 	    {
1358 	      ref_info->refs[offset] = ref;
1359 	      DF_REF_ID (ref) = offset++;
1360 	      count++;
1361 	      ref = DF_REF_NEXT_REG (ref);
1362 	      gcc_checking_assert (offset < ref_info->refs_size);
1363 	    }
1364 	}
1365       if (include_eq_uses)
1366 	{
1367 	  df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1368 	  while (ref)
1369 	    {
1370 	      ref_info->refs[offset] = ref;
1371 	      DF_REF_ID (ref) = offset++;
1372 	      count++;
1373 	      ref = DF_REF_NEXT_REG (ref);
1374 	      gcc_checking_assert (offset < ref_info->refs_size);
1375 	    }
1376 	}
1377       ref_info->count[regno] = count;
1378     }
1379 
1380   /* The bitmap size is not decremented when refs are deleted.  So
1381      reset it now that we have squished out all of the empty
1382      slots.  */
1383   ref_info->table_size = offset;
1384 }
1385 
1386 
1387 /* Take build ref table for either the uses or defs from the reg-use
1388    or reg-def chains.  This version processes the refs in insn order
1389    which is likely to be best if processing some segment of the
1390    function.  */
1391 
1392 static void
df_reorganize_refs_by_reg_by_insn(struct df_ref_info * ref_info,bool include_defs,bool include_uses,bool include_eq_uses)1393 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1394 				   bool include_defs,
1395 				   bool include_uses,
1396 				   bool include_eq_uses)
1397 {
1398   bitmap_iterator bi;
1399   unsigned int bb_index;
1400   unsigned int m = df->regs_inited;
1401   unsigned int offset = 0;
1402   unsigned int r;
1403   unsigned int start
1404     = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1405 
1406   memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1407   memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1408 
1409   ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1410   df_check_and_grow_ref_info (ref_info, 1);
1411 
1412   EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1413     {
1414       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1415       rtx_insn *insn;
1416       df_ref def, use;
1417 
1418       if (include_defs)
1419 	FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1420 	  {
1421 	    unsigned int regno = DF_REF_REGNO (def);
1422 	    ref_info->count[regno]++;
1423 	  }
1424       if (include_uses)
1425 	FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1426 	  {
1427 	    unsigned int regno = DF_REF_REGNO (use);
1428 	    ref_info->count[regno]++;
1429 	  }
1430 
1431       FOR_BB_INSNS (bb, insn)
1432 	{
1433 	  if (INSN_P (insn))
1434 	    {
1435 	      struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1436 
1437 	      if (include_defs)
1438 		FOR_EACH_INSN_INFO_DEF (def, insn_info)
1439 		  {
1440 		    unsigned int regno = DF_REF_REGNO (def);
1441 		    ref_info->count[regno]++;
1442 		  }
1443 	      if (include_uses)
1444 		FOR_EACH_INSN_INFO_USE (use, insn_info)
1445 		  {
1446 		    unsigned int regno = DF_REF_REGNO (use);
1447 		    ref_info->count[regno]++;
1448 		  }
1449 	      if (include_eq_uses)
1450 		FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1451 		  {
1452 		    unsigned int regno = DF_REF_REGNO (use);
1453 		    ref_info->count[regno]++;
1454 		  }
1455 	    }
1456 	}
1457     }
1458 
1459   for (r = start; r < m; r++)
1460     {
1461       ref_info->begin[r] = offset;
1462       offset += ref_info->count[r];
1463       ref_info->count[r] = 0;
1464     }
1465 
1466   EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1467     {
1468       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1469       rtx_insn *insn;
1470       df_ref def, use;
1471 
1472       if (include_defs)
1473 	FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1474 	  {
1475 	    unsigned int regno = DF_REF_REGNO (def);
1476 	    if (regno >= start)
1477 	      {
1478 		unsigned int id
1479 		  = ref_info->begin[regno] + ref_info->count[regno]++;
1480 		DF_REF_ID (def) = id;
1481 		ref_info->refs[id] = def;
1482 	      }
1483 	  }
1484       if (include_uses)
1485 	FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1486 	  {
1487 	    unsigned int regno = DF_REF_REGNO (def);
1488 	    if (regno >= start)
1489 	      {
1490 		unsigned int id
1491 		  = ref_info->begin[regno] + ref_info->count[regno]++;
1492 		DF_REF_ID (use) = id;
1493 		ref_info->refs[id] = use;
1494 	      }
1495 	  }
1496 
1497       FOR_BB_INSNS (bb, insn)
1498 	{
1499 	  if (INSN_P (insn))
1500 	    {
1501 	      struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1502 
1503 	      if (include_defs)
1504 		FOR_EACH_INSN_INFO_DEF (def, insn_info)
1505 		  {
1506 		    unsigned int regno = DF_REF_REGNO (def);
1507 		    if (regno >= start)
1508 		      {
1509 			unsigned int id
1510 			  = ref_info->begin[regno] + ref_info->count[regno]++;
1511 			DF_REF_ID (def) = id;
1512 			ref_info->refs[id] = def;
1513 		      }
1514 		  }
1515 	      if (include_uses)
1516 		FOR_EACH_INSN_INFO_USE (use, insn_info)
1517 		  {
1518 		    unsigned int regno = DF_REF_REGNO (use);
1519 		    if (regno >= start)
1520 		      {
1521 			unsigned int id
1522 			  = ref_info->begin[regno] + ref_info->count[regno]++;
1523 			DF_REF_ID (use) = id;
1524 			ref_info->refs[id] = use;
1525 		      }
1526 		  }
1527 	      if (include_eq_uses)
1528 		FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1529 		  {
1530 		    unsigned int regno = DF_REF_REGNO (use);
1531 		    if (regno >= start)
1532 		      {
1533 			unsigned int id
1534 			  = ref_info->begin[regno] + ref_info->count[regno]++;
1535 			DF_REF_ID (use) = id;
1536 			ref_info->refs[id] = use;
1537 		      }
1538 		  }
1539 	    }
1540 	}
1541     }
1542 
1543   /* The bitmap size is not decremented when refs are deleted.  So
1544      reset it now that we have squished out all of the empty
1545      slots.  */
1546 
1547   ref_info->table_size = offset;
1548 }
1549 
1550 /* Take build ref table for either the uses or defs from the reg-use
1551    or reg-def chains.  */
1552 
1553 static void
df_reorganize_refs_by_reg(struct df_ref_info * ref_info,bool include_defs,bool include_uses,bool include_eq_uses)1554 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1555 			   bool include_defs,
1556 			   bool include_uses,
1557 			   bool include_eq_uses)
1558 {
1559   if (df->analyze_subset)
1560     df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1561 				       include_uses, include_eq_uses);
1562   else
1563     df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1564 				       include_uses, include_eq_uses);
1565 }
1566 
1567 
1568 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
1569 static unsigned int
df_add_refs_to_table(unsigned int offset,struct df_ref_info * ref_info,df_ref ref)1570 df_add_refs_to_table (unsigned int offset,
1571 		      struct df_ref_info *ref_info,
1572 		      df_ref ref)
1573 {
1574   for (; ref; ref = DF_REF_NEXT_LOC (ref))
1575     if (!(df->changeable_flags & DF_NO_HARD_REGS)
1576 	|| (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1577       {
1578 	ref_info->refs[offset] = ref;
1579 	DF_REF_ID (ref) = offset++;
1580       }
1581   return offset;
1582 }
1583 
1584 
1585 /* Count the number of refs in all of the insns of BB. Include the
1586    defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1587    eq_uses if INCLUDE_EQ_USES.  */
1588 
1589 static unsigned int
df_reorganize_refs_by_insn_bb(basic_block bb,unsigned int offset,struct df_ref_info * ref_info,bool include_defs,bool include_uses,bool include_eq_uses)1590 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1591 			       struct df_ref_info *ref_info,
1592 			       bool include_defs, bool include_uses,
1593 			       bool include_eq_uses)
1594 {
1595   rtx_insn *insn;
1596 
1597   if (include_defs)
1598     offset = df_add_refs_to_table (offset, ref_info,
1599 				   df_get_artificial_defs (bb->index));
1600   if (include_uses)
1601     offset = df_add_refs_to_table (offset, ref_info,
1602 				   df_get_artificial_uses (bb->index));
1603 
1604   FOR_BB_INSNS (bb, insn)
1605     if (INSN_P (insn))
1606       {
1607 	unsigned int uid = INSN_UID (insn);
1608 	if (include_defs)
1609 	  offset = df_add_refs_to_table (offset, ref_info,
1610 					 DF_INSN_UID_DEFS (uid));
1611 	if (include_uses)
1612 	  offset = df_add_refs_to_table (offset, ref_info,
1613 					 DF_INSN_UID_USES (uid));
1614 	if (include_eq_uses)
1615 	  offset = df_add_refs_to_table (offset, ref_info,
1616 					 DF_INSN_UID_EQ_USES (uid));
1617       }
1618   return offset;
1619 }
1620 
1621 
1622 /* Organize the refs by insn into the table in REF_INFO.  If
1623    blocks_to_analyze is defined, use that set, otherwise the entire
1624    program.  Include the defs if INCLUDE_DEFS. Include the uses if
1625    INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES.  */
1626 
1627 static void
df_reorganize_refs_by_insn(struct df_ref_info * ref_info,bool include_defs,bool include_uses,bool include_eq_uses)1628 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1629 			    bool include_defs, bool include_uses,
1630 			    bool include_eq_uses)
1631 {
1632   basic_block bb;
1633   unsigned int offset = 0;
1634 
1635   ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1636   df_check_and_grow_ref_info (ref_info, 1);
1637   if (df->blocks_to_analyze)
1638     {
1639       bitmap_iterator bi;
1640       unsigned int index;
1641 
1642       EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1643 	{
1644 	  offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
1645 								      index),
1646 						  offset, ref_info,
1647 						  include_defs, include_uses,
1648 						  include_eq_uses);
1649 	}
1650 
1651       ref_info->table_size = offset;
1652     }
1653   else
1654     {
1655       FOR_ALL_BB_FN (bb, cfun)
1656 	offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1657 						include_defs, include_uses,
1658 						include_eq_uses);
1659       ref_info->table_size = offset;
1660     }
1661 }
1662 
1663 
1664 /* If the use refs in DF are not organized, reorganize them.  */
1665 
void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  /* Already in the requested order: nothing to do.  */
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      /* Same, but eq_uses (refs in REG_EQUAL/REG_EQUIV notes) too.  */
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* Drop the table entirely.  */
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      /* Unordered is a state refs decay into, never a requested
	 target order.  */
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
1704 
1705 
1706 /* If the def refs in DF are not organized, reorganize them.  */
1707 
void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  /* Already in the requested order: nothing to do.  */
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      /* Drop the table entirely.  */
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      /* Defs never live in notes, and unordered is never a requested
	 target order.  */
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
1740 
1741 
1742 /* Change all of the basic block references in INSN to use the insn's
1743    current basic block.  This function is called from routines that move
1744    instructions from one block to another.  */
1745 
void
df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  /* Update the insn's block pointer unconditionally; the df
     bookkeeping below only runs when df exists.  */
  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      /* The insn was never scanned; a full rescan picks up the new
	 block.  */
      if (dump_file)
	fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  /* Both the source and destination blocks need their solutions
     recomputed.  */
  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
	fprintf (dump_file, "  from %d to %d\n",
		 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}
1788 
1789 
1790 /* Helper function for df_ref_change_reg_with_loc.  */
1791 
static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
			      struct df_reg_info *new_df,
			      unsigned int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  /* Walk OLD_DF's register chain; every non-artificial ref whose
     location is exactly LOC is moved to NEW_DF's chain and renumbered
     to NEW_REGNO.  */
  while (the_ref)
    {
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
	  && DF_REF_LOC (the_ref)
	  && (*DF_REF_LOC (the_ref) == loc))
	{
	  df_ref next_ref = DF_REF_NEXT_REG (the_ref);
	  df_ref prev_ref = DF_REF_PREV_REG (the_ref);
	  df_ref *ref_ptr;
	  struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    DF_REF_NEXT_REG (prev_ref) = next_ref;
	  else
	    old_df->reg_chain = next_ref;
	  if (next_ref)
	    DF_REF_PREV_REG (next_ref) = prev_ref;
	  old_df->n_refs--;

	  /* Put the ref into the new regno chain.  */
	  DF_REF_PREV_REG (the_ref) = NULL;
	  DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
	  if (new_df->reg_chain)
	    DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
	  new_df->reg_chain = the_ref;
	  new_df->n_refs++;
	  if (DF_REF_BB (the_ref))
	    df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to sort the record again that the ref was in because
	     the regno is a sorting key.  First, find the right
	     record.  */
	  if (DF_REF_REG_DEF_P (the_ref))
	    ref_ptr = &insn_info->defs;
	  else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
	    ref_ptr = &insn_info->eq_uses;
	  else
	    ref_ptr = &insn_info->uses;
	  if (dump_file)
	    fprintf (dump_file, "changing reg in insn %d\n",
		     DF_REF_INSN_UID (the_ref));

	  /* Stop if we find the current reference or where the reference
	     needs to be.  */
	  while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
	    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	  if (*ref_ptr != the_ref)
	    {
	      /* The reference needs to be promoted up the list.
		 Splice it in at *ref_ptr, then walk forward to its old
		 position and unlink it from there.  */
	      df_ref next = DF_REF_NEXT_LOC (the_ref);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr != the_ref);
	      *ref_ptr = next;
	    }
	  else if (DF_REF_NEXT_LOC (the_ref)
		   && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
	    {
	      /* The reference needs to be demoted down the list.
		 Unlink it first, then scan forward for the first
		 element it is not greater than and splice it back in
		 before that.  */
	      *ref_ptr = DF_REF_NEXT_LOC (the_ref);
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	    }

	  the_ref = next_ref;
	}
      else
	the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
1878 
1879 
1880 /* Change the regno of register LOC to NEW_REGNO and update the df
1881    information accordingly.  Refs that do not match LOC are not changed
1882    which means that artificial refs are not changed since they have no loc.
1883    This call is to support the SET_REGNO macro. */
1884 
1885 void
df_ref_change_reg_with_loc(rtx loc,unsigned int new_regno)1886 df_ref_change_reg_with_loc (rtx loc, unsigned int new_regno)
1887 {
1888   unsigned int old_regno = REGNO (loc);
1889   if (old_regno == new_regno)
1890     return;
1891 
1892   if (df)
1893     {
1894       df_grow_reg_info ();
1895 
1896       df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1897 				    DF_REG_DEF_GET (new_regno),
1898 				    new_regno, loc);
1899       df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1900 				    DF_REG_USE_GET (new_regno),
1901 				    new_regno, loc);
1902       df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1903 				    DF_REG_EQ_USE_GET (new_regno),
1904 				    new_regno, loc);
1905     }
1906   set_mode_and_regno (loc, GET_MODE (loc), new_regno);
1907 }
1908 
1909 
1910 /* Delete the mw_hardregs that point into the eq_notes.  */
1911 
1912 static void
df_mw_hardreg_chain_delete_eq_uses(struct df_insn_info * insn_info)1913 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1914 {
1915   struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
1916   struct df_scan_problem_data *problem_data
1917     = (struct df_scan_problem_data *) df_scan->problem_data;
1918 
1919   while (*mw_ptr)
1920     {
1921       df_mw_hardreg *mw = *mw_ptr;
1922       if (mw->flags & DF_REF_IN_NOTE)
1923 	{
1924 	  *mw_ptr = DF_MWS_NEXT (mw);
1925 	  problem_data->mw_reg_pool->remove (mw);
1926 	}
1927       else
1928 	mw_ptr = &DF_MWS_NEXT (mw);
1929     }
1930 }
1931 
1932 
1933 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  */
1934 
void
df_notes_rescan (rtx_insn *insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  /* Nothing to do when dataflow info is not being maintained.  */
  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));

  /* The client has deferred rescanning.  Just queue the insn in the
     insns_to_notes_rescan bitmap and return.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  /* First sighting of this insn: create an empty record so the
	     deferred rescan has something to fill in.  */
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
	 be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
	bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  /* Immediate rescan: this insn is no longer pending deletion or a
     deferred notes rescan.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int i;

      /* Throw away the existing note-derived refs and mw_hardregs;
	 they will be rebuilt from the current notes below.  */
      df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
	   note = XEXP (note, 1))
	{
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_EQUIV:
	    case REG_EQUAL:
	      /* Record the uses inside the note; DF_REF_IN_NOTE marks
		 them as eq_uses rather than real uses.  */
	      df_uses_record (&collection_rec,
			      &XEXP (note, 0), DF_REF_REG_USE,
			      bb, insn_info, DF_REF_IN_NOTE);
	    default:
	      break;
	    }
	}

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
      FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
	{
	  /* Insert each new mw at its sorted position in the insn's
	     existing chain (df_mw_compare order).  */
	  while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
	    mw_ptr = &DF_MWS_NEXT (*mw_ptr);
	  DF_MWS_NEXT (mw) = *mw_ptr;
	  *mw_ptr = mw;
	  mw_ptr = &DF_MWS_NEXT (mw);
	}
      /* Install only the eq_uses; defs/uses of the insn are untouched.  */
      df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
    }
  else
    /* No record yet: fall back to a full rescan of the insn.  */
    df_insn_rescan (insn);

}
2024 
2025 
2026 /*----------------------------------------------------------------------------
2027    Hard core instruction scanning code.  No external interfaces here,
2028    just a lot of routines that look inside insns.
2029 ----------------------------------------------------------------------------*/
2030 
2031 
2032 /* Return true if the contents of two df_ref's are identical.
2033    It ignores DF_REF_MARKER.  */
2034 
2035 static bool
df_ref_equal_p(df_ref ref1,df_ref ref2)2036 df_ref_equal_p (df_ref ref1, df_ref ref2)
2037 {
2038   if (!ref2)
2039     return false;
2040 
2041   if (ref1 == ref2)
2042     return true;
2043 
2044   if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2045       || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2046       || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2047       || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2048       || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2049 	  != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2050       || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2051       || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2052     return false;
2053 
2054   switch (DF_REF_CLASS (ref1))
2055     {
2056     case DF_REF_ARTIFICIAL:
2057     case DF_REF_BASE:
2058       return true;
2059 
2060     case DF_REF_REGULAR:
2061       return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2062 
2063     default:
2064       gcc_unreachable ();
2065     }
2066   return false;
2067 }
2068 
2069 
2070 /* Compare REF1 and REF2 for sorting.  This is only called from places
2071    where all of the refs are of the same type, in the same insn, and
2072    have the same bb.  So these fields are not checked.  */
2073 
static int
df_ref_compare (df_ref ref1, df_ref ref2)
{
  /* Sort keys, in order: class, regno, type.  */
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  /* Same regno but different REG rtx: fall back to creation order for
     a stable result.  */
  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them is from a
	 mw and one is not, we need to have the one with the mw
	 first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
	  DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
	return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
	return -1;
      else
	return 1;
    }

  /* Everything relevant is equal; tie-break on creation order.  */
  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2110 
2111 /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2.  */
2112 
2113 static int
df_ref_ptr_compare(const void * r1,const void * r2)2114 df_ref_ptr_compare (const void *r1, const void *r2)
2115 {
2116   return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
2117 }
2118 
2119 /* Sort and compress a set of refs.  */
2120 
static void
df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;	/* Number of duplicates removed so far.  */

  count = ref_vec->length ();

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Exactly two elements: a single comparison orders them.  */
      df_ref r0 = (*ref_vec)[0];
      df_ref r1 = (*ref_vec)[1];
      if (df_ref_compare (r0, r1) > 0)
	std::swap ((*ref_vec)[0], (*ref_vec)[1]);
    }
  else
    {
      /* Scan for the first out-of-order adjacent pair.  */
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = (*ref_vec)[i];
	  df_ref r1 = (*ref_vec)[i + 1];
	  if (df_ref_compare (r0, r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
         which is the most common case for large COUNT case
         (which happens for CALL INSNs),
         no need to sort and filter out duplicate.
         Simply return the count.
         Make sure DF_GET_ADD_REFS adds refs in the increasing order
         of DF_REF_COMPARE.  */
      if (i == count - 1)
        return;
      ref_vec->qsort (df_ref_ptr_compare);
    }

  /* Squeeze out duplicates in place: element I+DIST+1 is the next
     candidate for slot I+1; refs equal to the current one are freed.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p ((*ref_vec)[i],
				(*ref_vec)[i + dist + 1]))
	{
	  df_free_ref ((*ref_vec)[i + dist + 1]);
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	(*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
    }

  /* Drop the now-unused tail.  */
  count -= dist;
  ref_vec->truncate (count);
}
2179 
2180 
/* Return true if the contents of two df_mw_hardreg's are identical.
   It ignores mw_order.  */
2183 
2184 static bool
df_mw_equal_p(struct df_mw_hardreg * mw1,struct df_mw_hardreg * mw2)2185 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2186 {
2187   if (!mw2)
2188     return false;
2189   return (mw1 == mw2) ||
2190     (mw1->mw_reg == mw2->mw_reg
2191      && mw1->type == mw2->type
2192      && mw1->flags == mw2->flags
2193      && mw1->start_regno == mw2->start_regno
2194      && mw1->end_regno == mw2->end_regno);
2195 }
2196 
2197 
2198 /* Compare MW1 and MW2 for sorting.  */
2199 
2200 static int
df_mw_compare(const df_mw_hardreg * mw1,const df_mw_hardreg * mw2)2201 df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2202 {
2203   if (mw1->type != mw2->type)
2204     return mw1->type - mw2->type;
2205 
2206   if (mw1->flags != mw2->flags)
2207     return mw1->flags - mw2->flags;
2208 
2209   if (mw1->start_regno != mw2->start_regno)
2210     return mw1->start_regno - mw2->start_regno;
2211 
2212   if (mw1->end_regno != mw2->end_regno)
2213     return mw1->end_regno - mw2->end_regno;
2214 
2215   if (mw1->mw_reg != mw2->mw_reg)
2216     return mw1->mw_order - mw2->mw_order;
2217 
2218   return 0;
2219 }
2220 
2221 /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2.  */
2222 
2223 static int
df_mw_ptr_compare(const void * m1,const void * m2)2224 df_mw_ptr_compare (const void *m1, const void *m2)
2225 {
2226   return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2227 			*(const df_mw_hardreg *const *) m2);
2228 }
2229 
/* Sort and compress a set of multiword hardregs.  */
2231 
2232 static void
df_sort_and_compress_mws(vec<df_mw_hardreg *,va_heap> * mw_vec)2233 df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
2234 {
2235   unsigned int count;
2236   struct df_scan_problem_data *problem_data
2237     = (struct df_scan_problem_data *) df_scan->problem_data;
2238   unsigned int i;
2239   unsigned int dist = 0;
2240 
2241   count = mw_vec->length ();
2242   if (count < 2)
2243     return;
2244   else if (count == 2)
2245     {
2246       struct df_mw_hardreg *m0 = (*mw_vec)[0];
2247       struct df_mw_hardreg *m1 = (*mw_vec)[1];
2248       if (df_mw_compare (m0, m1) > 0)
2249         {
2250           struct df_mw_hardreg *tmp = (*mw_vec)[0];
2251 	  (*mw_vec)[0] = (*mw_vec)[1];
2252 	  (*mw_vec)[1] = tmp;
2253         }
2254     }
2255   else
2256     mw_vec->qsort (df_mw_ptr_compare);
2257 
2258   for (i=0; i<count-dist; i++)
2259     {
2260       /* Find the next ref that is not equal to the current ref.  */
2261       while (i + dist + 1 < count
2262 	     && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2263 	{
2264 	  problem_data->mw_reg_pool->remove ((*mw_vec)[i + dist + 1]);
2265 	  dist++;
2266 	}
2267       /* Copy it down to the next position.  */
2268       if (dist && i + dist + 1 < count)
2269 	(*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2270     }
2271 
2272   count -= dist;
2273   mw_vec->truncate (count);
2274 }
2275 
2276 
2277 /* Sort and remove duplicates from the COLLECTION_REC.  */
2278 
2279 static void
df_canonize_collection_rec(struct df_collection_rec * collection_rec)2280 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2281 {
2282   df_sort_and_compress_refs (&collection_rec->def_vec);
2283   df_sort_and_compress_refs (&collection_rec->use_vec);
2284   df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2285   df_sort_and_compress_mws (&collection_rec->mw_vec);
2286 }
2287 
2288 
2289 /* Add the new df_ref to appropriate reg_info/ref_info chains.  */
2290 
static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  /* Hard-register refs flagged DF_HARD_REG_LIVE bump the
     per-register live counter.  */
  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* The ref must not already be linked on any register chain.  */
  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
		       && DF_REF_PREV_REG (this_ref) == NULL);

  /* Push THIS_REF onto the front of the doubly-linked chain.  */
  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      /* The ref's id is its slot in the flat table.  */
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    /* Refs kept out of the table get an invalid id.  */
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
2335 
2336 
2337 /* This function takes one of the groups of refs (defs, uses or
2338    eq_uses) and installs the entire group into the insn.  It also adds
2339    each of these refs into the appropriate chains.  */
2340 
static df_ref
df_install_refs (basic_block bb,
		 const vec<df_ref, va_heap> *old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count = old_vec->length ();
  if (count)
    {
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Decide whether these refs also go into the flat ref table,
	 and downgrade the table's ordering claim: after appending
	 here the table is no longer sorted by reg or insn.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  /* Table includes note refs, so these always go in.  */
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  /* Table excludes note refs.  */
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      /* Link consecutive refs together via NEXT_LOC and install each
	 on its register's chain (and optionally the ref table).  */
      FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
	{
	  DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
					? (*old_vec)[ix + 1]
					: NULL);
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}
      /* Return the head of the newly built NEXT_LOC chain.  */
      return (*old_vec)[0];
    }
  else
    return 0;
}
2391 
2392 
/* This function takes the mws and installs the entire group into the
   insn.  */
2395 
2396 static struct df_mw_hardreg *
df_install_mws(const vec<df_mw_hardreg *,va_heap> * old_vec)2397 df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
2398 {
2399   unsigned int count = old_vec->length ();
2400   if (count)
2401     {
2402       for (unsigned int i = 0; i < count - 1; i++)
2403 	DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2404       DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2405       return (*old_vec)[0];
2406     }
2407   else
2408     return 0;
2409 }
2410 
2411 
2412 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2413    chains and update other necessary information.  */
2414 
static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
		       basic_block bb, rtx_insn *insn, unsigned int flags)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
	 insn.  A null rec is a signal that the caller will handle the
	 chain specially.  */
      if (flags & copy_defs)
	{
	  gcc_checking_assert (!insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, &collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (flags & copy_uses)
	{
	  gcc_checking_assert (!insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, &collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (flags & copy_eq_uses)
	{
	  gcc_checking_assert (!insn_rec->eq_uses);
	  /* eq_uses share the use table but are installed as notes.  */
	  insn_rec->eq_uses
	    = df_install_refs (bb, &collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (flags & copy_mw)
	{
	  gcc_checking_assert (!insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (&collection_rec->mw_vec);
	}
    }
  else
    {
      /* No insn: these are the basic block's artificial defs and
	 uses.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      gcc_checking_assert (!bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, &collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      gcc_checking_assert (!bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, &collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
2472 
2473 
2474 /* Allocate a ref and initialize its fields.  */
2475 
static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 struct df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags)
{
  df_ref this_ref = NULL;
  /* For a SUBREG the regno comes from the inner register.  */
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each ref class has its own allocation pool; only regular refs
     carry a location, only artificial refs carry a bb.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) (problem_data->ref_base_pool->allocate ());
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) (problem_data->ref_artificial_pool->allocate ());
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) (problem_data->ref_regular_pool->allocate ());
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  /* Fill in the fields common to all classes.  */
  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) =  regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using old
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  /* May-clobber defs do not force the register live.  */
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	/* Uses, except of eliminable frame/arg pointers, keep the
	   hard register live.  */
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  if (collection_rec)
    {
      /* Defer installation: stash the ref in the caller's collection,
	 routed by def / note-use / plain-use.  */
      if (DF_REF_REG_DEF_P (this_ref))
	collection_rec->def_vec.safe_push (this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	collection_rec->eq_use_vec.safe_push (this_ref);
      else
	collection_rec->use_vec.safe_push (this_ref);
    }
  else
    /* No collection: install the ref into the df chains right away.  */
    df_install_ref_incremental (this_ref);

  return this_ref;
}
2556 
2557 
2558 /* Create new references of type DF_REF_TYPE for each part of register REG
2559    at address LOC within INSN of BB.  */
2560 
2561 
static void
df_ref_record (enum df_ref_class cl,
	       struct df_collection_rec *collection_rec,
               rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: record one ref per covered hard register.  */
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      if (GET_CODE (reg) == SUBREG)
	{
	  /* Narrow [regno, endregno) to the hard registers the subreg
	     actually touches.  */
	  regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					SUBREG_BYTE (reg), GET_MODE (reg));
	  endregno = regno + subreg_nregs (reg);
	}
      else
	endregno = END_REGNO (reg);

      /*  If this is a multiword hardreg, we create some extra
	  datastructures that will enable us to easily build REG_DEAD
	  and REG_UNUSED notes.  */
      if (collection_rec
	  && (endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  hardreg = problem_data->mw_reg_pool->allocate ();
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  collection_rec->mw_vec.safe_push (hardreg);
	}

      /* One ref per hard register in the range, each on the canonical
	 REG rtx for that regno.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags);

          gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      /* Pseudo register: a single ref suffices.  */
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags);
    }
}
2629 
2630 
2631 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2632    covered by the outer mode is smaller than that covered by the inner mode,
2633    is a read-modify-write operation.
2634    This function returns true iff the SUBREG X is such a SUBREG.  */
2635 
2636 bool
df_read_modify_subreg_p(rtx x)2637 df_read_modify_subreg_p (rtx x)
2638 {
2639   unsigned int isize, osize;
2640   if (GET_CODE (x) != SUBREG)
2641     return false;
2642   isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2643   osize = GET_MODE_SIZE (GET_MODE (x));
2644   return isize > osize
2645 	 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2646 }
2647 
2648 
2649 /* Process all the registers defined in the rtx pointed by LOC.
2650    Autoincrement/decrement definitions will be picked up by df_uses_record.
2651    Any change here has to be matched in df_find_hard_reg_defs_1.  */
2652 
static void
df_def_record_1 (struct df_collection_rec *collection_rec,
                 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
		 int flags)
{
  rtx dst = *loc;

  /* It is legal to have a set destination be a parallel. */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  /* Recurse on each element's destination.  */
	  df_def_record_1 (collection_rec, &XEXP (temp, 0),
			   bb, insn_info, flags);
	}
      return;
    }

  /* Strip STRICT_LOW_PART / ZERO_EXTRACT wrappers, accumulating the
     corresponding partial/read-write flags.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);

      /* We want to keep sp alive everywhere - by making all
	 writes to sp also use of sp. */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
	df_ref_record (DF_REF_BASE, collection_rec,
		       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      /* A narrowing subreg set only writes part of the register.  */
      if (df_read_modify_subreg_p (dst))
	flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
    }
}
2713 
2714 
2715 /* Process all the registers defined in the pattern rtx, X.  Any change
2716    here has to be matched in df_find_hard_reg_defs.  */
2717 
static void
df_defs_record (struct df_collection_rec *collection_rec,
                rtx x, basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_defs_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
      break;

    case CLOBBER:
      /* A clobber is a must-clobber def of its operand.  */
      flags |= DF_REF_MUST_CLOBBER;
      df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
      break;

    case COND_EXEC:
      /* Defs under COND_EXEC are recorded as conditional; note the
	 incoming FLAGS are replaced, not combined, here.  */
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
		      bb, insn_info, DF_REF_CONDITIONAL);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
	df_defs_record (collection_rec, XVECEXP (x, 0, i),
			bb, insn_info, flags);
      break;
    default:
      /* No DEFs to record in other cases */
      break;
    }
}
2752 
/* Set bits in *DEFS for hard registers found in the rtx DST, which is the
   destination of a set or clobber.  This has to match the logic in
   df_def_record_1.  */
2756 
2757 static void
df_find_hard_reg_defs_1(rtx dst,HARD_REG_SET * defs)2758 df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
2759 {
2760   /* It is legal to have a set destination be a parallel. */
2761   if (GET_CODE (dst) == PARALLEL)
2762     {
2763       int i;
2764       for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2765 	{
2766 	  rtx temp = XVECEXP (dst, 0, i);
2767 	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
2768 	  df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
2769 	}
2770       return;
2771     }
2772 
2773   if (GET_CODE (dst) == STRICT_LOW_PART)
2774       dst = XEXP (dst, 0);
2775 
2776   if (GET_CODE (dst) == ZERO_EXTRACT)
2777       dst = XEXP (dst, 0);
2778 
2779   /* At this point if we do not have a reg or a subreg, just return.  */
2780   if (REG_P (dst) && HARD_REGISTER_P (dst))
2781     SET_HARD_REG_BIT (*defs, REGNO (dst));
2782   else if (GET_CODE (dst) == SUBREG
2783 	   && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
2784     SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
2785 }
2786 
2787 /* Set bits in *DEFS for hard registers defined in the pattern X.  This
2788    has to match the logic in df_defs_record.  */
2789 
2790 static void
df_find_hard_reg_defs(rtx x,HARD_REG_SET * defs)2791 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
2792 {
2793   RTX_CODE code = GET_CODE (x);
2794   int i;
2795 
2796   switch (code)
2797     {
2798     case SET:
2799       df_find_hard_reg_defs_1 (SET_DEST (x), defs);
2800       break;
2801 
2802     case CLOBBER:
2803       df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
2804       break;
2805 
2806     case COND_EXEC:
2807       df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
2808       break;
2809 
2810     case PARALLEL:
2811       for (i = 0; i < XVECLEN (x, 0); i++)
2812 	df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
2813       break;
2814     default:
2815       /* No DEFs to record in other cases */
2816       break;
2817     }
2818 }
2819 
2820 
/* Process all the registers used in the rtx at address LOC.

   COLLECTION_REC accumulates the use refs that are found.  REF_TYPE
   classifies the use (plain register use, address of a memory load or
   store, etc.) and FLAGS carries DF_REF_* modifier bits such as
   DF_REF_IN_NOTE or DF_REF_PARTIAL.  BB and INSN_INFO identify where
   the rtx occurs.  The function dispatches on the rtx code and
   recurses; the generic tail below scans operands of codes with no
   special handling.  */

static void
df_uses_record (struct df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
		basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and label-like rtxes contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (collection_rec,
			&XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE,
		        bb, insn_info,
			flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Registers in the address are uses; only DF_REF_IN_NOTE is
	 propagated from FLAGS into the address scan.  */
      df_uses_record (collection_rec,
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
		      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
	  return;
	}
      /* ... Fall through ...  */

    case REG:
      /* Record the use of the register (or of the REG inside a
	 SUBREG, via the fall-through above).  */
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     x, loc, bb, insn_info,
		     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
	/* The position and width operands are ordinary uses.  */
        df_uses_record (collection_rec,
                        &XEXP (x, 1), ref_type, bb, insn_info, flags);
        df_uses_record (collection_rec,
                        &XEXP (x, 2), ref_type, bb, insn_info, flags);

        /* If the parameters to the zero or sign extract are
           constants, strip them off and recurse, otherwise there is
           no information that we can gain from this operation.  */
        if (code == ZERO_EXTRACT)
          flags |= DF_REF_ZERO_EXTRACT;
        else
          flags |= DF_REF_SIGN_EXTRACT;

        df_uses_record (collection_rec,
                        &XEXP (x, 0), ref_type, bb, insn_info, flags);
        return;
      }
      break;

    case SET:
      {
	rtx dst = SET_DEST (x);
	/* SETs never occur inside REG_EQUAL/REG_EQUIV notes.  */
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	df_uses_record (collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

	/* The destination may still contain uses, depending on its
	   form.  */
	switch (GET_CODE (dst))
	  {
	    case SUBREG:
	      /* A read-modify-write SUBREG reads the full inner
		 register before writing part of it.  */
	      if (df_read_modify_subreg_p (dst))
		{
		  df_uses_record (collection_rec, &SUBREG_REG (dst),
				  DF_REF_REG_USE, bb, insn_info,
				  flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
		  break;
		}
	      /* Fall through.  */
	    case REG:
	    case PARALLEL:
	    case SCRATCH:
	    case PC:
	    case CC0:
		break;
	    case MEM:
	      /* A store: registers in the address are uses.  */
	      df_uses_record (collection_rec, &XEXP (dst, 0),
			      DF_REF_REG_MEM_STORE, bb, insn_info, flags);
	      break;
	    case STRICT_LOW_PART:
	      {
		rtx *temp = &XEXP (dst, 0);
		/* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
		dst = XEXP (dst, 0);
		df_uses_record (collection_rec,
				(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
				DF_REF_REG_USE, bb, insn_info,
				DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
	      }
	      break;
	    case ZERO_EXTRACT:
	      {
		/* Position and width operands are uses.  */
		df_uses_record (collection_rec, &XEXP (dst, 1),
				DF_REF_REG_USE, bb, insn_info, flags);
		df_uses_record (collection_rec, &XEXP (dst, 2),
				DF_REF_REG_USE, bb, insn_info, flags);
		/* Extracting into a MEM is a plain use of the address;
		   extracting into a REG reads the untouched bits.  */
                if (GET_CODE (XEXP (dst,0)) == MEM)
                  df_uses_record (collection_rec, &XEXP (dst, 0),
                                  DF_REF_REG_USE, bb, insn_info,
                                  flags);
                else
                  df_uses_record (collection_rec, &XEXP (dst, 0),
                                  DF_REF_REG_USE, bb, insn_info,
                                  DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
	      }
	      break;

	    default:
	      gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We can not just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags);
	    return;
	  }
	break;
      }

    case VAR_LOCATION:
      /* Debug variable location: only the location expression holds
	 uses.  */
      df_uses_record (collection_rec,
		      &PAT_VAR_LOCATION_LOC (x),
		      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
		     bb, insn_info,
		     DF_REF_REG_DEF,
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (collection_rec,
			      &XVECEXP (x, i, j), ref_type,
			      bb, insn_info, flags);
	  }
      }
  }

  return;
}
3074 
3075 
3076 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses.  */
3077 
3078 static void
df_get_conditional_uses(struct df_collection_rec * collection_rec)3079 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3080 {
3081   unsigned int ix;
3082   df_ref ref;
3083 
3084   FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
3085     {
3086       if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3087         {
3088           df_ref use;
3089 
3090           use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3091 					 DF_REF_LOC (ref), DF_REF_BB (ref),
3092 					 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3093 					 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3094           DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3095         }
3096     }
3097 }
3098 
3099 
/* Get call's extra defs and uses (track caller-saved registers).

   Adds to COLLECTION_REC the refs implied by a CALL_INSN beyond its
   explicit pattern: the honorary stack-pointer use, uses/defs of
   global registers, may-clobbers of registers invalidated by the
   call, and the uses/clobbers listed in CALL_INSN_FUNCTION_USAGE.
   BB and INSN_INFO identify the call; FLAGS carries extra DF_REF_*
   bits (e.g. DF_REF_CONDITIONAL for a COND_EXEC call).  */

static void
df_get_call_refs (struct df_collection_rec *collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  /* Hard regs already defined by the call pattern itself; no extra
     clobber refs are created for these.  */
  HARD_REG_SET defs_generated;
  /* Hard regs this particular call may set, per the target/IPA info.  */
  HARD_REG_SET fn_reg_set_usage;

  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
  get_call_reg_set_usage (insn_info->insn, &fn_reg_set_usage,
			  regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM)
	/* The stack ptr is used (honorarily) by a CALL insn.  */
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_USE,
		       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
	{
	  /* Calls to const functions cannot access any global registers and
	     calls to pure functions cannot set them.  All other calls may
	     reference any of the global registers, so they are recorded as
	     used. */
	  if (!RTL_CONST_CALL_P (insn_info->insn))
	    {
	      df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			     NULL, bb, insn_info, DF_REF_REG_USE, flags);
	      if (!RTL_PURE_CALL_P (insn_info->insn))
		df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			       NULL, bb, insn_info, DF_REF_REG_DEF, flags);
	    }
	}
      else if (TEST_HARD_REG_BIT (fn_reg_set_usage, i)
	       /* no clobbers for regs that are the result of the call */
	       && !TEST_HARD_REG_BIT (defs_generated, i)
	       /* For a sibling call, skip regs that merely carry a live
		  value through to the exit block (the tail-called
		  function's return value), unless they appear in our
		  own return rtx.  */
	       && (!is_sibling_call
		   || !bitmap_bit_p (df->exit_block_uses, i)
		   || refers_to_regno_p (i, crtl->return_rtx)))
	  df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			 NULL, bb, insn_info, DF_REF_REG_DEF,
			 DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
	      /* Skip clobbers already generated by the pattern itself.  */
	      if (!TEST_HARD_REG_BIT (defs_generated, regno))
		df_defs_record (collection_rec, XEXP (note, 0), bb,
				insn_info, flags);
	    }
	  else
	    /* A clobber of a non-REG (e.g. a MEM): record the uses
	       inside it.  */
	    df_uses_record (collection_rec, &XEXP (note, 0),
		            DF_REF_REG_USE, bb, insn_info, flags);
	}
    }

  return;
}
3178 
/* Collect all refs in the INSN. This function is free of any
   side-effect - it will create and return a lists of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains.

   Collection order matters for later canonicalization: note uses
   first, then call-specific refs, then pattern defs, then pattern
   uses, then the uses mirrored from conditional defs.  */

static void
df_insn_refs_collect (struct df_collection_rec *collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  /* Refs inside a COND_EXEC pattern are conditional.  */
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
	  /* Uses in notes are recorded with DF_REF_IN_NOTE so they can
	     be distinguished from real uses.  */
          df_uses_record (collection_rec,
                          &XEXP (note, 0), DF_REF_REG_USE,
                          bb, insn_info, DF_REF_IN_NOTE);
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0);
	  /* If the hard frame pointer is distinct, it is used too.  */
	  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	    df_ref_record (DF_REF_BASE, collection_rec,
			   regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
			   NULL, bb, insn_info,
			   DF_REF_REG_USE, 0);
          break;
        default:
          break;
        }
    }

  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE. */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info,
		      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
		  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES. */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  df_canonize_collection_rec (collection_rec);
}
3247 
3248 /* Recompute the luids for the insns in BB.  */
3249 
3250 void
df_recompute_luids(basic_block bb)3251 df_recompute_luids (basic_block bb)
3252 {
3253   rtx_insn *insn;
3254   int luid = 0;
3255 
3256   df_grow_insn_info ();
3257 
3258   /* Scan the block an insn at a time from beginning to end.  */
3259   FOR_BB_INSNS (bb, insn)
3260     {
3261       struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3262       /* Inserting labels does not always trigger the incremental
3263 	 rescanning.  */
3264       if (!insn_info)
3265 	{
3266 	  gcc_assert (!INSN_P (insn));
3267 	  insn_info = df_insn_create_insn_record (insn);
3268 	}
3269 
3270       DF_INSN_INFO_LUID (insn_info) = luid;
3271       if (INSN_P (insn))
3272 	luid++;
3273     }
3274 }
3275 
3276 
/* Collect all artificial refs at the block level for BB and add them
   to COLLECTION_REC.

   The entry and exit blocks are handled entirely by their specialized
   collectors; for normal blocks this records EH-handler data-register
   defs, the hard frame pointer def for non-local goto targets, and
   the per-block artificial uses.  */

static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
  /* Start from an empty collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;
	  /* These defs logically occur before the block's insns, hence
	     DF_REF_AT_TOP.  */
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
	}
    }

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
		   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* EH landing pads need the (larger) EH artificial-use set.  */
      bitmap au = bb_has_eh_pred (bb)
	? &df->eh_block_artificial_uses
	: &df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
	{
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_USE, 0);
	}
    }

  df_canonize_collection_rec (collection_rec);
}
3337 
3338 
3339 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS.  */
3340 
3341 void
df_bb_refs_record(int bb_index,bool scan_insns)3342 df_bb_refs_record (int bb_index, bool scan_insns)
3343 {
3344   basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
3345   rtx_insn *insn;
3346   int luid = 0;
3347 
3348   if (!df)
3349     return;
3350 
3351   df_collection_rec collection_rec;
3352   df_grow_bb_info (df_scan);
3353   if (scan_insns)
3354     /* Scan the block an insn at a time from beginning to end.  */
3355     FOR_BB_INSNS (bb, insn)
3356       {
3357 	struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3358 	gcc_assert (!insn_info);
3359 
3360 	insn_info = df_insn_create_insn_record (insn);
3361 	if (INSN_P (insn))
3362 	  {
3363 	    /* Record refs within INSN.  */
3364 	    DF_INSN_INFO_LUID (insn_info) = luid++;
3365 	    df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3366 	    df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
3367 	  }
3368 	DF_INSN_INFO_LUID (insn_info) = luid;
3369       }
3370 
3371   /* Other block level artificial refs */
3372   df_bb_refs_collect (&collection_rec, bb);
3373   df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);
3374 
3375   /* Now that the block has been processed, set the block as dirty so
3376      LR and LIVE will get it processed.  */
3377   df_set_bb_dirty (bb);
3378 }
3379 
3380 
3381 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3382    block. */
3383 
3384 static void
df_get_regular_block_artificial_uses(bitmap regular_block_artificial_uses)3385 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3386 {
3387 #ifdef EH_USES
3388   unsigned int i;
3389 #endif
3390 
3391   bitmap_clear (regular_block_artificial_uses);
3392 
3393   if (reload_completed)
3394     {
3395       if (frame_pointer_needed)
3396 	bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3397     }
3398   else
3399     /* Before reload, there are a few registers that must be forced
3400        live everywhere -- which might not already be the case for
3401        blocks within infinite loops.  */
3402     {
3403       unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3404 
3405       /* Any reference to any pseudo before reload is a potential
3406 	 reference of the frame pointer.  */
3407       bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3408 
3409       if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3410 	bitmap_set_bit (regular_block_artificial_uses,
3411 			HARD_FRAME_POINTER_REGNUM);
3412 
3413       /* Pseudos with argument area equivalences may require
3414 	 reloading via the argument pointer.  */
3415       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3416 	  && fixed_regs[ARG_POINTER_REGNUM])
3417 	bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3418 
3419       /* Any constant, or pseudo with constant equivalences, may
3420 	 require reloading from memory using the pic register.  */
3421       if (picreg != INVALID_REGNUM
3422 	  && fixed_regs[picreg])
3423 	bitmap_set_bit (regular_block_artificial_uses, picreg);
3424     }
3425   /* The all-important stack pointer must always be live.  */
3426   bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3427 
3428 #ifdef EH_USES
3429   /* EH_USES registers are used:
3430      1) at all insns that might throw (calls or with -fnon-call-exceptions
3431 	trapping insns)
3432      2) in all EH edges
3433      3) to support backtraces and/or debugging, anywhere between their
3434 	initialization and where they the saved registers are restored
3435 	from them, including the cases where we don't reach the epilogue
3436 	(noreturn call or infinite loop).  */
3437   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3438     if (EH_USES (i))
3439       bitmap_set_bit (regular_block_artificial_uses, i);
3440 #endif
3441 }
3442 
3443 
3444 /* Get the artificial use set for an eh block. */
3445 
3446 static void
df_get_eh_block_artificial_uses(bitmap eh_block_artificial_uses)3447 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3448 {
3449   bitmap_clear (eh_block_artificial_uses);
3450 
3451   /* The following code (down through the arg_pointer setting APPEARS
3452      to be necessary because there is nothing that actually
3453      describes what the exception handling code may actually need
3454      to keep alive.  */
3455   if (reload_completed)
3456     {
3457       if (frame_pointer_needed)
3458 	{
3459 	  bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3460 	  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3461 	    bitmap_set_bit (eh_block_artificial_uses,
3462 			    HARD_FRAME_POINTER_REGNUM);
3463 	}
3464       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3465 	  && fixed_regs[ARG_POINTER_REGNUM])
3466 	bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3467     }
3468 }
3469 
3470 
3471 
3472 /*----------------------------------------------------------------------------
3473    Specialized hard register scanning functions.
3474 ----------------------------------------------------------------------------*/
3475 
3476 
3477 /* Mark a register in SET.  Hard registers in large modes get all
3478    of their component registers set as well.  */
3479 
3480 static void
df_mark_reg(rtx reg,void * vset)3481 df_mark_reg (rtx reg, void *vset)
3482 {
3483   bitmap_set_range ((bitmap) vset, REGNO (reg), REG_NREGS (reg));
3484 }
3485 
3486 
/* Set the bit for regs that are considered being defined at the entry.

   Fills ENTRY_BLOCK_DEFS with the hard registers that carry a value
   into the function: argument registers, global registers, the stack
   and frame pointers, the static chain, the struct-value register,
   the incoming return address, and any target-specific extras.  */

static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* Global registers are live (and hence "defined") everywhere,
	 including on entry.  */
      if (global_regs[i])
	bitmap_set_bit (entry_block_defs, i);
      /* Argument registers arrive with values; record the register
	 they are actually received in.  */
      if (FUNCTION_ARG_REGNO_P (i))
	bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (targetm.have_prologue () && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
	  bitmap_set_bit (entry_block_defs, i);
    }

  /* The aggregate-return pointer, if passed in a register.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = targetm.calls.static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (entry_block_defs, picreg);
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  /* The return address arrives in a register on some targets.  */
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX))
#endif

  /* Let the target add anything else it considers live on entry.  */
  targetm.extra_live_on_entry (entry_block_defs);
}
3565 
3566 
3567 /* Return the (conservative) set of hard registers that are defined on
3568    entry to the function.
3569    It uses df->entry_block_defs to determine which register
3570    reference to include.  */
3571 
3572 static void
df_entry_block_defs_collect(struct df_collection_rec * collection_rec,bitmap entry_block_defs)3573 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3574 			     bitmap entry_block_defs)
3575 {
3576   unsigned int i;
3577   bitmap_iterator bi;
3578 
3579   EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3580     {
3581       df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3582 		     ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3583     }
3584 
3585   df_canonize_collection_rec (collection_rec);
3586 }
3587 
3588 
3589 /* Record the (conservative) set of hard registers that are defined on
3590    entry to the function.  */
3591 
3592 static void
df_record_entry_block_defs(bitmap entry_block_defs)3593 df_record_entry_block_defs (bitmap entry_block_defs)
3594 {
3595   struct df_collection_rec collection_rec;
3596   df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3597 
3598   /* Process bb_refs chain */
3599   df_refs_add_to_chains (&collection_rec,
3600 			 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3601 			 NULL,
3602 			 copy_defs);
3603 }
3604 
3605 
3606 /* Update the defs in the entry block.  */
3607 
3608 void
df_update_entry_block_defs(void)3609 df_update_entry_block_defs (void)
3610 {
3611   bitmap_head refs;
3612   bool changed = false;
3613 
3614   bitmap_initialize (&refs, &df_bitmap_obstack);
3615   df_get_entry_block_def_set (&refs);
3616   if (df->entry_block_defs)
3617     {
3618       if (!bitmap_equal_p (df->entry_block_defs, &refs))
3619 	{
3620 	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3621 	  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3622 	  df_ref_chain_delete (bb_info->artificial_defs);
3623 	  bb_info->artificial_defs = NULL;
3624 	  changed = true;
3625 	}
3626     }
3627   else
3628     {
3629       struct df_scan_problem_data *problem_data
3630 	= (struct df_scan_problem_data *) df_scan->problem_data;
3631 	gcc_unreachable ();
3632       df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3633       changed = true;
3634     }
3635 
3636   if (changed)
3637     {
3638       df_record_entry_block_defs (&refs);
3639       bitmap_copy (df->entry_block_defs, &refs);
3640       df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
3641     }
3642   bitmap_clear (&refs);
3643 }
3644 
3645 
/* Set the bit for regs that are considered being used at the exit.

   Fills EXIT_BLOCK_USES with the hard registers that must be treated
   as live at function exit: the stack/frame pointers, the PIC
   register, global and epilogue-used registers, used call-saved
   registers, EH return data registers, and the function's return
   value registers.  */

static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;
  unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

  bitmap_clear (exit_block_uses);

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && picreg != INVALID_REGNUM
      && fixed_regs[picreg])
    bitmap_set_bit (exit_block_uses, picreg);

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (exit_block_uses, i);

  if (targetm.have_epilogue () && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
	    && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  bitmap_set_bit (exit_block_uses, i);
    }

  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	bitmap_set_bit (exit_block_uses, regno);
      }

#ifdef EH_RETURN_STACKADJ_RTX
  /* Before the epilogue exists, the EH stack-adjust register (if it
     is a register) must stay live for __builtin_eh_return.  */
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Likewise for the EH return handler address register.  */
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
3728 
3729 
/* Collect the refs of hard registers that are used in the exit block.
   It uses df->exit_block_uses to determine which registers to include.  */
3732 
static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec,
			    bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  /* Create an artificial use ref for every register recorded in
     EXIT_BLOCK_USES.  */
  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec,
		   regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  df_canonize_collection_rec (collection_rec);
}
3755 
3756 
/* Record the set of hard registers that are used in the exit block.
   It uses df->exit_block_uses to determine which bits to include.  */
3759 
3760 static void
df_record_exit_block_uses(bitmap exit_block_uses)3761 df_record_exit_block_uses (bitmap exit_block_uses)
3762 {
3763   struct df_collection_rec collection_rec;
3764   df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3765 
3766   /* Process bb_refs chain */
3767   df_refs_add_to_chains (&collection_rec,
3768 			 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3769 			 NULL,
3770 			 copy_uses);
3771 }
3772 
3773 
3774 /* Update the uses in the exit block.  */
3775 
void
df_update_exit_block_uses (void)
{
  bitmap_head refs;
  bool changed = false;

  /* Recompute the expected set of exit block uses and compare it with
     the cached set; only rebuild the artificial refs if they differ.  */
  bitmap_initialize (&refs, &df_bitmap_obstack);
  df_get_exit_block_use_set (&refs);
  if (df->exit_block_uses)
    {
      if (!bitmap_equal_p (df->exit_block_uses, &refs))
	{
	  /* The set changed: delete the stale artificial uses (and
	     their du chains) before recording the new ones below.  */
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
	  df_ref_chain_delete (bb_info->artificial_uses);
	  bb_info->artificial_uses = NULL;
	  changed = true;
	}
    }
  else
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
	/* NOTE(review): df->exit_block_uses is apparently always
	   allocated before this runs, hence the unreachable marker;
	   the allocation below is dead code — confirm before relying
	   on this branch.  */
	gcc_unreachable ();
      df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_exit_block_uses (&refs);
      bitmap_copy (df->exit_block_uses,& refs);
      df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
    }
  bitmap_clear (&refs);
}
3812 
3813 static bool initialized = false;
3814 
3815 
3816 /* Initialize some platform specific structures.  */
3817 
3818 void
df_hard_reg_init(void)3819 df_hard_reg_init (void)
3820 {
3821 #ifdef ELIMINABLE_REGS
3822   int i;
3823   static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3824 #endif
3825   if (initialized)
3826     return;
3827 
3828   /* Record which registers will be eliminated.  We use this in
3829      mark_used_regs.  */
3830   CLEAR_HARD_REG_SET (elim_reg_set);
3831 
3832 #ifdef ELIMINABLE_REGS
3833   for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3834     SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3835 #else
3836   SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3837 #endif
3838 
3839   initialized = true;
3840 }
3841 
3842 
3843 /* Recompute the parts of scanning that are based on regs_ever_live
3844    because something changed in that array.  */
3845 
3846 void
df_update_entry_exit_and_calls(void)3847 df_update_entry_exit_and_calls (void)
3848 {
3849   basic_block bb;
3850 
3851   df_update_entry_block_defs ();
3852   df_update_exit_block_uses ();
3853 
3854   /* The call insns need to be rescanned because there may be changes
3855      in the set of registers clobbered across the call.  */
3856   FOR_EACH_BB_FN (bb, cfun)
3857     {
3858       rtx_insn *insn;
3859       FOR_BB_INSNS (bb, insn)
3860 	{
3861 	  if (INSN_P (insn) && CALL_P (insn))
3862 	    df_insn_rescan (insn);
3863 	}
3864     }
3865 }
3866 
3867 
/* Return true if hard REG is actually used in some instruction.
   There are a fair number of conditions that affect the setting of
   this array.  See the comment in df.h for df->hard_regs_live_count
   for the conditions under which this array is set.  */
3872 
3873 bool
df_hard_reg_used_p(unsigned int reg)3874 df_hard_reg_used_p (unsigned int reg)
3875 {
3876   return df->hard_regs_live_count[reg] != 0;
3877 }
3878 
3879 
/* A count of the number of times REG is actually used in some
   instruction.  There are a fair number of conditions that affect the
   setting of this array.  See the comment in df.h for
   df->hard_regs_live_count for the conditions under which this array
   is set.  */
3885 
3886 
3887 unsigned int
df_hard_reg_used_count(unsigned int reg)3888 df_hard_reg_used_count (unsigned int reg)
3889 {
3890   return df->hard_regs_live_count[reg];
3891 }
3892 
3893 
3894 /* Get the value of regs_ever_live[REGNO].  */
3895 
3896 bool
df_regs_ever_live_p(unsigned int regno)3897 df_regs_ever_live_p (unsigned int regno)
3898 {
3899   return regs_ever_live[regno];
3900 }
3901 
3902 
/* Set regs_ever_live[REGNO] to VALUE.  If this causes regs_ever_live
   to change, schedule that change for the next update.  */
3905 
3906 void
df_set_regs_ever_live(unsigned int regno,bool value)3907 df_set_regs_ever_live (unsigned int regno, bool value)
3908 {
3909   if (regs_ever_live[regno] == value)
3910     return;
3911 
3912   regs_ever_live[regno] = value;
3913   if (df)
3914     df->redo_entry_and_exit = true;
3915 }
3916 
3917 
3918 /* Compute "regs_ever_live" information from the underlying df
3919    information.  Set the vector to all false if RESET.  */
3920 
3921 void
df_compute_regs_ever_live(bool reset)3922 df_compute_regs_ever_live (bool reset)
3923 {
3924   unsigned int i;
3925   bool changed = df->redo_entry_and_exit;
3926 
3927   if (reset)
3928     memset (regs_ever_live, 0, sizeof (regs_ever_live));
3929 
3930   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3931     if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3932       {
3933 	regs_ever_live[i] = true;
3934 	changed = true;
3935       }
3936   if (changed)
3937     df_update_entry_exit_and_calls ();
3938   df->redo_entry_and_exit = false;
3939 }
3940 
3941 
3942 /*----------------------------------------------------------------------------
3943   Dataflow ref information verification functions.
3944 
3945   df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3946   df_reg_chain_verify_unmarked (refs)
3947   df_refs_verify (vec<stack, va_df_ref>, ref*, bool)
3948   df_mws_verify (mw*, mw*, bool)
3949   df_insn_refs_verify (collection_rec, bb, insn, bool)
3950   df_bb_refs_verify (bb, refs, bool)
3951   df_bb_verify (bb)
3952   df_exit_block_bitmap_verify (bool)
3953   df_entry_block_bitmap_verify (bool)
3954   df_scan_verify ()
3955 ----------------------------------------------------------------------------*/
3956 
3957 
3958 /* Mark all refs in the reg chain.  Verify that all of the registers
3959 are in the correct chain.  */
3960 
static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      /* A ref must not be visited twice; that would mean it appears
	 on more than one reg chain.  */
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_REG_DEF_P (ref));
      else
	gcc_assert (!DF_REF_REG_DEF_P (ref));

      /* Only refs on the eq-use chain may carry DF_REF_IN_NOTE.  */
      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      /* The chain must be consistently doubly linked.  */
      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  /* The caller compares this length against the recorded ref counts
     for REGNO.  */
  return count;
}
3995 
3996 
3997 /* Verify that all of the registers in the chain are unmarked.  */
3998 
3999 static void
df_reg_chain_verify_unmarked(df_ref refs)4000 df_reg_chain_verify_unmarked (df_ref refs)
4001 {
4002   df_ref ref;
4003   for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4004     gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4005 }
4006 
4007 
4008 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4009 
static bool
df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  /* Walk NEW_REC and OLD_REC in lockstep; every element must match.  */
  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
    {
      if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* Abort if fail is called from the function level verifier.  If
	 that is the context, mark this reg as being seen.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
	  DF_REF_REG_UNMARK (old_rec);
	}

      old_rec = DF_REF_NEXT_LOC (old_rec);
    }

  /* OLD_REC must be exhausted as well, or it contains extra refs.  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
4044 
4045 
4046 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4047 
4048 static bool
df_mws_verify(const vec<df_mw_hardreg *,va_heap> * new_rec,struct df_mw_hardreg * old_rec,bool abort_if_fail)4049 df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
4050 	       struct df_mw_hardreg *old_rec,
4051 	       bool abort_if_fail)
4052 {
4053   unsigned int ix;
4054   struct df_mw_hardreg *new_reg;
4055 
4056   FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
4057     {
4058       if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
4059 	{
4060 	  if (abort_if_fail)
4061 	    gcc_assert (0);
4062 	  else
4063 	    return false;
4064 	}
4065       old_rec = DF_MWS_NEXT (old_rec);
4066     }
4067 
4068   if (abort_if_fail)
4069     gcc_assert (old_rec == NULL);
4070   else
4071     return old_rec == NULL;
4072   return false;
4073 }
4074 
4075 
4076 /* Return true if the existing insn refs information is complete and
4077    correct. Otherwise (i.e. if there's any missing or extra refs),
4078    return the correct df_ref chain in REFS_RETURN.
4079 
4080    If ABORT_IF_FAIL, leave the refs that are verified (already in the
4081    ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4082    verification mode instead of the whole function, so unmark
4083    everything.
4084 
4085    If ABORT_IF_FAIL is set, this function never returns false.  */
4086 
4087 static bool
df_insn_refs_verify(struct df_collection_rec * collection_rec,basic_block bb,rtx_insn * insn,bool abort_if_fail)4088 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4089 		     basic_block bb,
4090                      rtx_insn *insn,
4091 		     bool abort_if_fail)
4092 {
4093   bool ret1, ret2, ret3, ret4;
4094   unsigned int uid = INSN_UID (insn);
4095   struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4096 
4097   df_insn_refs_collect (collection_rec, bb, insn_info);
4098 
4099   /* Unfortunately we cannot opt out early if one of these is not
4100      right because the marks will not get cleared.  */
4101   ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4102 			 abort_if_fail);
4103   ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
4104 			 abort_if_fail);
4105   ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4106 			 abort_if_fail);
4107   ret4 = df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4108 		       abort_if_fail);
4109   return (ret1 && ret2 && ret3 && ret4);
4110 }
4111 
4112 
4113 /* Return true if all refs in the basic block are correct and complete.
4114    Due to df_ref_chain_verify, it will cause all refs
4115    that are verified to have DF_REF_MARK bit set.  */
4116 
static bool
df_bb_verify (basic_block bb)
{
  rtx_insn *insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
        continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      /* Release the refs collected for this insn so COLLECTION_REC
	 can be reused for the next one.  */
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  /* The verifies above abort on failure, so reaching here means the
     block checked out.  */
  return true;
}
4143 
4144 
4145 /* Returns true if the entry block has correct and complete df_ref set.
4146    If not it either aborts if ABORT_IF_FAIL is true or returns false.  */
4147 
4148 static bool
df_entry_block_bitmap_verify(bool abort_if_fail)4149 df_entry_block_bitmap_verify (bool abort_if_fail)
4150 {
4151   bitmap_head entry_block_defs;
4152   bool is_eq;
4153 
4154   bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
4155   df_get_entry_block_def_set (&entry_block_defs);
4156 
4157   is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4158 
4159   if (!is_eq && abort_if_fail)
4160     {
4161       fprintf (stderr, "entry_block_defs = ");
4162       df_print_regset (stderr, &entry_block_defs);
4163       fprintf (stderr, "df->entry_block_defs = ");
4164       df_print_regset (stderr, df->entry_block_defs);
4165       gcc_assert (0);
4166     }
4167 
4168   bitmap_clear (&entry_block_defs);
4169 
4170   return is_eq;
4171 }
4172 
4173 
4174 /* Returns true if the exit block has correct and complete df_ref set.
4175    If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4176 
4177 static bool
df_exit_block_bitmap_verify(bool abort_if_fail)4178 df_exit_block_bitmap_verify (bool abort_if_fail)
4179 {
4180   bitmap_head exit_block_uses;
4181   bool is_eq;
4182 
4183   bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
4184   df_get_exit_block_use_set (&exit_block_uses);
4185 
4186   is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4187 
4188   if (!is_eq && abort_if_fail)
4189     {
4190       fprintf (stderr, "exit_block_uses = ");
4191       df_print_regset (stderr, &exit_block_uses);
4192       fprintf (stderr, "df->exit_block_uses = ");
4193       df_print_regset (stderr, df->exit_block_uses);
4194       gcc_assert (0);
4195     }
4196 
4197   bitmap_clear (&exit_block_uses);
4198 
4199   return is_eq;
4200 }
4201 
4202 
/* Verify that the df_ref information for all insns in all blocks is
   correct and complete; aborts on the first inconsistency found.  */
4205 
void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap_head regular_block_artificial_uses;
  bitmap_head eh_block_artificial_uses;

  /* Nothing to verify when the df machinery is not active.  */
  if (!df)
    return;

  /* Verification is a 4 step process. */

  /* (1) All of the refs are marked by going through the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
		  == DF_REG_DEF_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
		  == DF_REG_USE_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
		  == DF_REG_EQ_USE_COUNT (i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);

  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);

  /* The EH set is maintained as a superset of the regular set.  */
  bitmap_ior_into (&eh_block_artificial_uses,
		   &regular_block_artificial_uses);

  /* Check artificial_uses bitmaps didn't change. */
  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
			      &df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
			      &df->eh_block_artificial_uses));

  bitmap_clear (&regular_block_artificial_uses);
  bitmap_clear (&eh_block_artificial_uses);

  /* Verify entry block and exit block. These only verify the bitmaps,
     the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB_FN (bb, cfun)
    df_bb_verify (bb);

  /* (4) See if all reg chains are traversed a second time.  This time
     a check is made that the marks are clear.  A set mark would be
     from a ref that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}
4276