1 /* Convert RTL to assembler code and output it, for GNU compiler.
2    Copyright (C) 1987-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This is the final pass of the compiler.
21    It looks at the rtl code for a function and outputs assembler code.
22 
23    Call `final_start_function' to output the assembler code for function entry,
24    `final' to output assembler code for some RTL code,
25    `final_end_function' to output assembler code for function exit.
26    If a function is compiled in several pieces, each piece is
27    output separately with `final'.
28 
29    Some optimizations are also done at this level.
30    Move instructions that were made unnecessary by good register allocation
31    are detected and omitted from the output.  (Though most of these
32    are removed by the last jump pass.)
33 
34    Instructions to set the condition codes are omitted when it can be
35    seen that the condition codes already had the desired values.
36 
37    In some cases it is sufficient if the inherited condition codes
38    have related values, but this may require the following insn
39    (the one that tests the condition codes) to be modified.
40 
41    The code for the function prologue and epilogue is generated
42    directly in assembler by the target functions function_prologue and
43    function_epilogue.  Those instructions never exist as rtl.  */
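
/* A minimal sketch of how the three entry points described above are
   typically driven for one function.  This is illustrative only; the real
   driver (rest_of_handle_final, later in this file) additionally brackets
   these calls with assemble_start_function / assemble_end_function:

     rtx_insn *first = get_insns ();
     final_start_function (first, asm_out_file, optimize);
     final (first, asm_out_file, optimize);
     final_end_function ();
*/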
44 
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "params.h"
79 #include "stringpool.h"
80 #include "attribs.h"
81 #include "asan.h"
82 #include "rtl-iter.h"
83 #include "print-rtl.h"
84 
85 #ifdef XCOFF_DEBUGGING_INFO
86 #include "xcoffout.h"		/* Needed for external data declarations.  */
87 #endif
88 
89 #include "dwarf2out.h"
90 
91 #ifdef DBX_DEBUGGING_INFO
92 #include "dbxout.h"
93 #endif
94 
95 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
96    So define a null default for it to save conditionalization later.  */
97 #ifndef CC_STATUS_INIT
98 #define CC_STATUS_INIT
99 #endif
100 
101 /* Is the given character a logical line separator for the assembler?  */
102 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
103 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
104 #endif
105 
106 #ifndef JUMP_TABLES_IN_TEXT_SECTION
107 #define JUMP_TABLES_IN_TEXT_SECTION 0
108 #endif
109 
110 /* Bitflags used by final_scan_insn.  */
111 #define SEEN_NOTE	1
112 #define SEEN_EMITTED	2
113 #define SEEN_NEXT_VIEW	4
114 
115 /* Last insn processed by final_scan_insn.  */
116 static rtx_insn *debug_insn;
117 rtx_insn *current_output_insn;
118 
119 /* Line number of last NOTE.  */
120 static int last_linenum;
121 
122 /* Column number of last NOTE.  */
123 static int last_columnnum;
124 
125 /* Discriminator written to assembly.  */
126 static int last_discriminator;
127 
128 /* Discriminator to be written to assembly for current instruction.
129    Note: actual usage depends on loc_discriminator_kind setting.  */
130 static int discriminator;
131 static inline int compute_discriminator (location_t loc);
132 
133 /* Discriminator identifying current basic block among others sharing
134    the same locus.  */
135 static int bb_discriminator;
136 
137 /* Basic block discriminator for previous instruction.  */
138 static int last_bb_discriminator;
139 
140 /* Highest line number in current block.  */
141 static int high_block_linenum;
142 
143 /* Likewise for function.  */
144 static int high_function_linenum;
145 
146 /* Filename of last NOTE.  */
147 static const char *last_filename;
148 
149 /* Override filename, line and column number.  */
150 static const char *override_filename;
151 static int override_linenum;
152 static int override_columnnum;
153 static int override_discriminator;
154 
155 /* Whether to force emission of a line note before the next insn.  */
156 static bool force_source_line = false;
157 
158 extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */
159 
160 /* Nonzero while outputting an `asm' with operands.
161    This means that inconsistencies are the user's fault, so don't die.
162    The precise value is the insn being output, to pass to error_for_asm.  */
163 const rtx_insn *this_is_asm_operands;
164 
165 /* Number of operands of this insn, for an `asm' with operands.  */
166 static unsigned int insn_noperands;
167 
168 /* Compare optimization flag.  */
169 
170 static rtx last_ignored_compare = 0;
171 
172 /* Assign a unique number to each insn that is output.
173    This can be used to generate unique local labels.  */
174 
175 static int insn_counter = 0;
176 
177 /* This variable contains machine-dependent flags (defined in tm.h)
178    set and examined by output routines
179    that describe how to interpret the condition codes properly.  */
180 
181 CC_STATUS cc_status;
182 
183 /* During output of an insn, this contains a copy of cc_status
184    from before the insn.  */
185 
186 CC_STATUS cc_prev_status;
187 
188 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */
189 
190 static int block_depth;
191 
192 /* Nonzero if have enabled APP processing of our assembler output.  */
193 
194 static int app_on;
195 
196 /* If we are outputting an insn sequence, this contains the sequence rtx.
197    Zero otherwise.  */
198 
199 rtx_sequence *final_sequence;
200 
201 #ifdef ASSEMBLER_DIALECT
202 
203 /* Number of the assembler dialect to use, starting at 0.  */
204 static int dialect_number;
205 #endif
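
/* For reference: under ASSEMBLER_DIALECT, output templates may contain the
   "{...|...}" construct, which output_asm_insn expands by picking the
   alternative selected by dialect_number.  For example, a (made-up)
   template "{je %l0|jz %l0}" would emit "je %l0" for dialect 0 and
   "jz %l0" for dialect 1.  */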
206 
207 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
208 rtx current_insn_predicate;
209 
210 /* True if printing into -fdump-final-insns= dump.  */
211 bool final_insns_dump_p;
212 
213 /* True if profile_function should be called, but hasn't been called yet.  */
214 static bool need_profile_function;
215 
216 static int asm_insn_count (rtx);
217 static void profile_function (FILE *);
218 static void profile_after_prologue (FILE *);
219 static bool notice_source_line (rtx_insn *, bool *);
220 static rtx walk_alter_subreg (rtx *, bool *);
221 static void output_asm_name (void);
222 static void output_alternate_entry_point (FILE *, rtx_insn *);
223 static tree get_mem_expr_from_op (rtx, int *);
224 static void output_asm_operand_names (rtx *, int *, int);
225 #ifdef LEAF_REGISTERS
226 static void leaf_renumber_regs (rtx_insn *);
227 #endif
228 #if HAVE_cc0
229 static int alter_cond (rtx);
230 #endif
231 static int align_fuzz (rtx, rtx, int, unsigned);
232 static void collect_fn_hard_reg_usage (void);
233 static tree get_call_fndecl (rtx_insn *);
234 
235 /* Initialize data in final at the beginning of a compilation.  */
236 
237 void
238 init_final (const char *filename ATTRIBUTE_UNUSED)
239 {
240   app_on = 0;
241   final_sequence = 0;
242 
243 #ifdef ASSEMBLER_DIALECT
244   dialect_number = ASSEMBLER_DIALECT;
245 #endif
246 }
247 
248 /* Default target function prologue and epilogue assembler output.
249 
250    If not overridden for epilogue code, then the function body itself
251    contains return instructions wherever needed.  */
252 void
253 default_function_pro_epilogue (FILE *)
254 {
255 }
256 
257 void
258 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
259 					 tree decl ATTRIBUTE_UNUSED,
260 					 bool new_is_cold ATTRIBUTE_UNUSED)
261 {
262 }
263 
264 /* Default target hook that outputs nothing to a stream.  */
265 void
266 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
267 {
268 }
269 
270 /* Enable APP processing of subsequent output.
271    Used before the output from an `asm' statement.  */
272 
273 void
274 app_enable (void)
275 {
276   if (! app_on)
277     {
278       fputs (ASM_APP_ON, asm_out_file);
279       app_on = 1;
280     }
281 }
282 
283 /* Disable APP processing of subsequent output.
284    Called from varasm.c before most kinds of output.  */
285 
286 void
287 app_disable (void)
288 {
289   if (app_on)
290     {
291       fputs (ASM_APP_OFF, asm_out_file);
292       app_on = 0;
293     }
294 }
295 
296 /* Return the number of slots filled in the current
297    delayed branch sequence (we don't count the insn needing the
298    delay slot).   Zero if not in a delayed branch sequence.  */
299 
300 int
301 dbr_sequence_length (void)
302 {
303   if (final_sequence != 0)
304     return XVECLEN (final_sequence, 0) - 1;
305   else
306     return 0;
307 }
308 
309 /* The next two pages contain routines used to compute the length of an insn
310    and to shorten branches.  */
311 
312 /* Arrays for insn lengths, and addresses.  The latter is referenced by
313    `insn_current_length'.  */
314 
315 static int *insn_lengths;
316 
317 vec<int> insn_addresses_;
318 
319 /* Max uid for which the above arrays are valid.  */
320 static int insn_lengths_max_uid;
321 
322 /* Address of insn being processed.  Used by `insn_current_length'.  */
323 int insn_current_address;
324 
325 /* Address of insn being processed in previous iteration.  */
326 int insn_last_address;
327 
328 /* Known invariant alignment of the insn being processed.  */
329 int insn_current_align;
330 
331 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
332    gives the next following alignment insn that increases the known
333    alignment, or NULL_RTX if there is no such insn.
334    For any alignment obtained this way, we can again index uid_align with
335    its uid to obtain the next following align that in turn increases the
336    alignment, till we reach NULL_RTX; the sequence obtained this way
337    for each insn we'll call the alignment chain of this insn in the following
338    comments.  */
339 
340 static rtx *uid_align;
341 static int *uid_shuid;
342 static vec<align_flags> label_align;
343 
344 /* Indicate that branch shortening hasn't yet been done.  */
345 
346 void
347 init_insn_lengths (void)
348 {
349   if (uid_shuid)
350     {
351       free (uid_shuid);
352       uid_shuid = 0;
353     }
354   if (insn_lengths)
355     {
356       free (insn_lengths);
357       insn_lengths = 0;
358       insn_lengths_max_uid = 0;
359     }
360   if (HAVE_ATTR_length)
361     INSN_ADDRESSES_FREE ();
362   if (uid_align)
363     {
364       free (uid_align);
365       uid_align = 0;
366     }
367 }
368 
369 /* Obtain the current length of an insn.  If branch shortening has been done,
370    get its actual length.  Otherwise, use FALLBACK_FN to calculate the
371    length.  */
372 static int
373 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
374 {
375   rtx body;
376   int i;
377   int length = 0;
378 
379   if (!HAVE_ATTR_length)
380     return 0;
381 
382   if (insn_lengths_max_uid > INSN_UID (insn))
383     return insn_lengths[INSN_UID (insn)];
384   else
385     switch (GET_CODE (insn))
386       {
387       case NOTE:
388       case BARRIER:
389       case CODE_LABEL:
390       case DEBUG_INSN:
391 	return 0;
392 
393       case CALL_INSN:
394       case JUMP_INSN:
395 	length = fallback_fn (insn);
396 	break;
397 
398       case INSN:
399 	body = PATTERN (insn);
400 	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
401 	  return 0;
402 
403 	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
404 	  length = asm_insn_count (body) * fallback_fn (insn);
405 	else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
406 	  for (i = 0; i < seq->len (); i++)
407 	    length += get_attr_length_1 (seq->insn (i), fallback_fn);
408 	else
409 	  length = fallback_fn (insn);
410 	break;
411 
412       default:
413 	break;
414       }
415 
416 #ifdef ADJUST_INSN_LENGTH
417   ADJUST_INSN_LENGTH (insn, length);
418 #endif
419   return length;
420 }
421 
422 /* Obtain the current length of an insn.  If branch shortening has been done,
423    get its actual length.  Otherwise, get its maximum length.  */
424 int
425 get_attr_length (rtx_insn *insn)
426 {
427   return get_attr_length_1 (insn, insn_default_length);
428 }
429 
430 /* Obtain the current length of an insn.  If branch shortening has been done,
431    get its actual length.  Otherwise, get its minimum length.  */
432 int
433 get_attr_min_length (rtx_insn *insn)
434 {
435   return get_attr_length_1 (insn, insn_min_length);
436 }
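
/* Illustrative (hypothetical) use of the two accessors above from a port:
   lengths are expressed in whatever units the target's length attribute
   uses, so machine-dependent code might choose an encoding roughly like

     if (get_attr_length (jump_insn) > 4)
       output_asm_insn (long_template, operands);
     else
       output_asm_insn (short_template, operands);

   where the threshold of 4 and the template names are made up.  */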
437 
438 /* Code to handle alignment inside shorten_branches.  */
439 
440 /* Here is an explanation how the algorithm in align_fuzz can give
441    proper results:
442 
443    Call a sequence of instructions beginning with alignment point X
444    and continuing until the next alignment point `block X'.  When `X'
445    is used in an expression, it means the alignment value of the
446    alignment point.
447 
448    Call the distance between the start of the first insn of block X, and
449    the end of the last insn of block X `IX', for the `inner size of X'.
450    This is clearly the sum of the instruction lengths.
451 
452    Likewise with the next alignment-delimited block following X, which we
453    shall call block Y.
454 
455    Call the distance between the start of the first insn of block X, and
456    the start of the first insn of block Y `OX', for the `outer size of X'.
457 
458    The estimated padding is then OX - IX.
459 
460    OX can be safely estimated as
461 
462            if (X >= Y)
463                    OX = round_up(IX, Y)
464            else
465                    OX = round_up(IX, X) + Y - X
466 
467    Clearly est(IX) >= real(IX), because that only depends on the
468    instruction lengths, and those being overestimated is a given.
469 
470    Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
471    we needn't worry about that when thinking about OX.
472 
473    When X >= Y, the alignment provided by Y adds no uncertainty factor
474    for branch ranges starting before X, so we can just round what we have.
475    But when X < Y, we don't know anything about the, so to speak,
476    `middle bits', so we have to assume the worst when aligning up from an
477    address mod X to one mod Y, which is Y - X.  */
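
/* A worked instance of the OX estimate above, with made-up numbers: let
   block X have inner size IX = 10 bytes, alignment X = 4 and following
   alignment Y = 8.  Since X < Y, OX = round_up (10, 4) + 8 - 4 = 16, i.e.
   up to 6 bytes of padding are assumed.  If instead X = 8 and Y = 4, then
   X >= Y and OX = round_up (10, 4) = 12, i.e. at most 2 bytes of padding.  */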
478 
479 #ifndef LABEL_ALIGN
480 #define LABEL_ALIGN(LABEL) align_labels
481 #endif
482 
483 #ifndef LOOP_ALIGN
484 #define LOOP_ALIGN(LABEL) align_loops
485 #endif
486 
487 #ifndef LABEL_ALIGN_AFTER_BARRIER
488 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
489 #endif
490 
491 #ifndef JUMP_ALIGN
492 #define JUMP_ALIGN(LABEL) align_jumps
493 #endif
494 
495 #ifndef ADDR_VEC_ALIGN
496 static int
497 final_addr_vec_align (rtx_jump_table_data *addr_vec)
498 {
499   int align = GET_MODE_SIZE (addr_vec->get_data_mode ());
500 
501   if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
502     align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
503   return exact_log2 (align);
504 
505 }
506 
507 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
508 #endif
509 
510 #ifndef INSN_LENGTH_ALIGNMENT
511 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
512 #endif
513 
514 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
515 
516 static int min_labelno, max_labelno;
517 
518 #define LABEL_TO_ALIGNMENT(LABEL) \
519   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno])
520 
521 /* For the benefit of port specific code do this also as a function.  */
522 
523 align_flags
524 label_to_alignment (rtx label)
525 {
526   if (CODE_LABEL_NUMBER (label) <= max_labelno)
527     return LABEL_TO_ALIGNMENT (label);
528   return align_flags ();
529 }
530 
531 /* The differences in addresses
532    between a branch and its target might grow or shrink depending on
533    the alignment the start insn of the range (the branch for a forward
534    branch or the label for a backward branch) starts out on; if these
535    differences are used naively, they can even oscillate infinitely.
536    We therefore want to compute a 'worst case' address difference that
537    is independent of the alignment the start insn of the range ends
538    up on, and that is at least as large as the actual difference.
539    The function align_fuzz calculates the amount we have to add to the
540    naively computed difference, by traversing the part of the alignment
541    chain of the start insn of the range that is in front of the end insn
542    of the range, and considering for each alignment the maximum amount
543    that it might contribute to a size increase.
544 
545    For casesi tables, we also want to know worst case minimum amounts of
546    address difference, in case a machine description wants to introduce
547    some common offset that is added to all offsets in a table.
548    For this purpose, align_fuzz with a growth argument of 0 computes the
549    appropriate adjustment.  */
550 
551 /* Compute the maximum delta by which the difference of the addresses of
552    START and END might grow / shrink due to a different address for start
553    which changes the size of alignment insns between START and END.
554    KNOWN_ALIGN_LOG is the alignment known for START.
555    GROWTH should be ~0 if the objective is to compute potential code size
556    increase, and 0 if the objective is to compute potential shrink.
557    The return value is undefined for any other value of GROWTH.  */
558 
559 static int
560 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
561 {
562   int uid = INSN_UID (start);
563   rtx align_label;
564   int known_align = 1 << known_align_log;
565   int end_shuid = INSN_SHUID (end);
566   int fuzz = 0;
567 
568   for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
569     {
570       int align_addr, new_align;
571 
572       uid = INSN_UID (align_label);
573       align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
574       if (uid_shuid[uid] > end_shuid)
575 	break;
576       align_flags alignment = LABEL_TO_ALIGNMENT (align_label);
577       new_align = 1 << alignment.levels[0].log;
578       if (new_align < known_align)
579 	continue;
580       fuzz += (-align_addr ^ growth) & (new_align - known_align);
581       known_align = new_align;
582     }
583   return fuzz;
584 }
585 
586 /* Compute a worst-case reference address of a branch so that it
587    can be safely used in the presence of aligned labels.  Since the
588    size of the branch itself is unknown, the size of the branch is
589    not included in the range.  I.e. for a forward branch, the reference
590    address is the end address of the branch as known from the previous
591    branch shortening pass, minus a value to account for possible size
592    increase due to alignment.  For a backward branch, it is the start
593    address of the branch as known from the current pass, plus a value
594    to account for possible size increase due to alignment.
595    NB.: Therefore, the maximum offset allowed for backward branches needs
596    to exclude the branch size.  */
597 
598 int
599 insn_current_reference_address (rtx_insn *branch)
600 {
601   rtx dest;
602   int seq_uid;
603 
604   if (! INSN_ADDRESSES_SET_P ())
605     return 0;
606 
607   rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
608   seq_uid = INSN_UID (seq);
609   if (!jump_to_label_p (branch))
610     /* This can happen for example on the PA; the objective is to know the
611        offset to address something in front of the start of the function.
612        Thus, we can treat it like a backward branch.
613        We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
614        any alignment we'd encounter, so we skip the call to align_fuzz.  */
615     return insn_current_address;
616   dest = JUMP_LABEL (branch);
617 
618   /* BRANCH has no proper alignment chain set, so use SEQ.
619      BRANCH also has no INSN_SHUID.  */
620   if (INSN_SHUID (seq) < INSN_SHUID (dest))
621     {
622       /* Forward branch.  */
623       return (insn_last_address + insn_lengths[seq_uid]
624 	      - align_fuzz (seq, dest, length_unit_log, ~0));
625     }
626   else
627     {
628       /* Backward branch.  */
629       return (insn_current_address
630 	      + align_fuzz (dest, seq, length_unit_log, ~0));
631     }
632 }
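
/* Illustrative (hypothetical) use of the function above: a port choosing
   between a short and a long branch encoding during shortening might
   compare the worst-case distance against the short form's reach, e.g.

     int dist = (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (branch)))
                 - insn_current_reference_address (branch));
     int len = IN_RANGE (dist, -128, 127) ? 2 : 6;

   where the range and the lengths are made-up, target-specific numbers.  */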
633 
634 /* Compute branch alignments based on CFG profile.  */
635 
636 unsigned int
637 compute_alignments (void)
638 {
639   basic_block bb;
640   align_flags max_alignment;
641 
642   label_align.truncate (0);
643 
644   max_labelno = max_label_num ();
645   min_labelno = get_first_label_num ();
646   label_align.safe_grow_cleared (max_labelno - min_labelno + 1);
647 
648   /* If not optimizing or optimizing for size, don't assign any alignments.  */
649   if (! optimize || optimize_function_for_size_p (cfun))
650     return 0;
651 
652   if (dump_file)
653     {
654       dump_reg_info (dump_file);
655       dump_flow_info (dump_file, TDF_DETAILS);
656       flow_loops_dump (dump_file, NULL, 1);
657     }
658   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
659   profile_count count_threshold = cfun->cfg->count_max.apply_scale
660 		 (1, PARAM_VALUE (PARAM_ALIGN_THRESHOLD));
661 
662   if (dump_file)
663     {
664       fprintf (dump_file, "count_max: ");
665       cfun->cfg->count_max.dump (dump_file);
666       fprintf (dump_file, "\n");
667     }
668   FOR_EACH_BB_FN (bb, cfun)
669     {
670       rtx_insn *label = BB_HEAD (bb);
671       bool has_fallthru = 0;
672       edge e;
673       edge_iterator ei;
674 
675       if (!LABEL_P (label)
676 	  || optimize_bb_for_size_p (bb))
677 	{
678 	  if (dump_file)
679 	    fprintf (dump_file,
680 		     "BB %4i loop %2i loop_depth %2i skipped.\n",
681 		     bb->index,
682 		     bb->loop_father->num,
683 		     bb_loop_depth (bb));
684 	  continue;
685 	}
686       max_alignment = LABEL_ALIGN (label);
687       profile_count fallthru_count = profile_count::zero ();
688       profile_count branch_count = profile_count::zero ();
689 
690       FOR_EACH_EDGE (e, ei, bb->preds)
691 	{
692 	  if (e->flags & EDGE_FALLTHRU)
693 	    has_fallthru = 1, fallthru_count += e->count ();
694 	  else
695 	    branch_count += e->count ();
696 	}
697       if (dump_file)
698 	{
699 	  fprintf (dump_file, "BB %4i loop %2i loop_depth"
700 		   " %2i fall ",
701 		   bb->index, bb->loop_father->num,
702 		   bb_loop_depth (bb));
703 	  fallthru_count.dump (dump_file);
704 	  fprintf (dump_file, " branch ");
705 	  branch_count.dump (dump_file);
706 	  if (!bb->loop_father->inner && bb->loop_father->num)
707 	    fprintf (dump_file, " inner_loop");
708 	  if (bb->loop_father->header == bb)
709 	    fprintf (dump_file, " loop_header");
710 	  fprintf (dump_file, "\n");
711 	}
712       if (!fallthru_count.initialized_p () || !branch_count.initialized_p ())
713 	continue;
714 
715       /* There are two purposes for aligning a block with no fallthru incoming edge:
716 	 1) to avoid fetch stalls when branch destination is near cache boundary
717 	 2) to improve cache efficiency in case the previous block is not executed
718 	    (so it does not need to be in the cache).
719 
720 	 To catch the first case, we align frequently executed blocks.
721 	 To catch the second, we align blocks that are executed more frequently
722 	 than the predecessor and the predecessor is likely to not be executed
723 	 when function is called.  */
724 
725       if (!has_fallthru
726 	  && (branch_count > count_threshold
727 	      || (bb->count > bb->prev_bb->count.apply_scale (10, 1)
728 		  && (bb->prev_bb->count
729 		      <= ENTRY_BLOCK_PTR_FOR_FN (cfun)
730 			   ->count.apply_scale (1, 2)))))
731 	{
732 	  align_flags alignment = JUMP_ALIGN (label);
733 	  if (dump_file)
734 	    fprintf (dump_file, "  jump alignment added.\n");
735 	  max_alignment = align_flags::max (max_alignment, alignment);
736 	}
737       /* In case the block is frequent and reached mostly by a non-fallthru edge,
738 	 align it.  It is most likely the first block of a loop.  */
739       if (has_fallthru
740 	  && !(single_succ_p (bb)
741 	       && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
742 	  && optimize_bb_for_speed_p (bb)
743 	  && branch_count + fallthru_count > count_threshold
744 	  && (branch_count
745 	      > fallthru_count.apply_scale
746 		    (PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS), 1)))
747 	{
748 	  align_flags alignment = LOOP_ALIGN (label);
749 	  if (dump_file)
750 	    fprintf (dump_file, "  internal loop alignment added.\n");
751 	  max_alignment = align_flags::max (max_alignment, alignment);
752 	}
753       LABEL_TO_ALIGNMENT (label) = max_alignment;
754     }
755 
756   loop_optimizer_finalize ();
757   free_dominance_info (CDI_DOMINATORS);
758   return 0;
759 }
760 
761 /* Grow the LABEL_ALIGN array after new labels are created.  */
762 
763 static void
764 grow_label_align (void)
765 {
766   int old = max_labelno;
767   int n_labels;
768   int n_old_labels;
769 
770   max_labelno = max_label_num ();
771 
772   n_labels = max_labelno - min_labelno + 1;
773   n_old_labels = old - min_labelno + 1;
774 
775   label_align.safe_grow_cleared (n_labels);
776 
777   /* The range of labels grows monotonically in the function.  Failing here
778      means that the initialization of the array got lost.  */
779   gcc_assert (n_old_labels <= n_labels);
780 }
781 
782 /* Update the already computed alignment information.  LABEL_PAIRS is a vector
783    made up of pairs of labels for which the alignment information of the first
784    element will be copied from that of the second element.  */
785 
786 void
787 update_alignments (vec<rtx> &label_pairs)
788 {
789   unsigned int i = 0;
790   rtx iter, label = NULL_RTX;
791 
792   if (max_labelno != max_label_num ())
793     grow_label_align ();
794 
795   FOR_EACH_VEC_ELT (label_pairs, i, iter)
796     if (i & 1)
797       LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
798     else
799       label = iter;
800 }
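
/* Illustrative (hypothetical) caller of update_alignments: a port that
   introduces a label NEW_LAB standing in for an existing OLD_LAB after
   alignments have been computed could propagate the alignment like so:

     auto_vec<rtx> pairs;
     pairs.safe_push (new_lab);   // receives the alignment ...
     pairs.safe_push (old_lab);   // ... copied from this label
     update_alignments (pairs);

   NEW_LAB / OLD_LAB are made-up names; only the pairing order matters.  */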
801 
802 namespace {
803 
804 const pass_data pass_data_compute_alignments =
805 {
806   RTL_PASS, /* type */
807   "alignments", /* name */
808   OPTGROUP_NONE, /* optinfo_flags */
809   TV_NONE, /* tv_id */
810   0, /* properties_required */
811   0, /* properties_provided */
812   0, /* properties_destroyed */
813   0, /* todo_flags_start */
814   0, /* todo_flags_finish */
815 };
816 
817 class pass_compute_alignments : public rtl_opt_pass
818 {
819 public:
820   pass_compute_alignments (gcc::context *ctxt)
821     : rtl_opt_pass (pass_data_compute_alignments, ctxt)
822   {}
823 
824   /* opt_pass methods: */
825   virtual unsigned int execute (function *) { return compute_alignments (); }
826 
827 }; // class pass_compute_alignments
828 
829 } // anon namespace
830 
831 rtl_opt_pass *
832 make_pass_compute_alignments (gcc::context *ctxt)
833 {
834   return new pass_compute_alignments (ctxt);
835 }
836 
837 
838 /* Make a pass over all insns and compute their actual lengths by shortening
839    any branches of variable length if possible.  */
840 
841 /* shorten_branches might be called multiple times:  for example, the SH
842    port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
843    In order to do this, it needs proper length information, which it obtains
844    by calling shorten_branches.  This cannot be collapsed with
845    shorten_branches itself into a single pass unless we also want to integrate
846    reorg.c, since the branch splitting exposes new instructions with delay
847    slots.  */
848 
849 void
850 shorten_branches (rtx_insn *first)
851 {
852   rtx_insn *insn;
853   int max_uid;
854   int i;
855   rtx_insn *seq;
856   int something_changed = 1;
857   char *varying_length;
858   rtx body;
859   int uid;
860   rtx align_tab[MAX_CODE_ALIGN + 1];
861 
862   /* Compute maximum UID and allocate label_align / uid_shuid.  */
863   max_uid = get_max_uid ();
864 
865   /* Free uid_shuid before reallocating it.  */
866   free (uid_shuid);
867 
868   uid_shuid = XNEWVEC (int, max_uid);
869 
870   if (max_labelno != max_label_num ())
871     grow_label_align ();
872 
873   /* Initialize label_align and set up uid_shuid to be strictly
874      monotonically rising with insn order.  */
875   /* We use alignment here to keep track of the maximum alignment we want to
876      impose on the next CODE_LABEL (or the current one if we are processing
877      the CODE_LABEL itself).  */
878 
879   align_flags max_alignment;
880 
881   for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
882     {
883       INSN_SHUID (insn) = i++;
884       if (INSN_P (insn))
885 	continue;
886 
887       if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
888 	{
889 	  /* Merge in alignments computed by compute_alignments.  */
890 	  align_flags alignment = LABEL_TO_ALIGNMENT (label);
891 	  max_alignment = align_flags::max (max_alignment, alignment);
892 
893 	  rtx_jump_table_data *table = jump_table_for_label (label);
894 	  if (!table)
895 	    {
896 	      align_flags alignment = LABEL_ALIGN (label);
897 	      max_alignment = align_flags::max (max_alignment, alignment);
898 	    }
899 	  /* ADDR_VECs only take room if read-only data goes into the text
900 	     section.  */
901 	  if ((JUMP_TABLES_IN_TEXT_SECTION
902 	       || readonly_data_section == text_section)
903 	      && table)
904 	    {
905 	      align_flags alignment = align_flags (ADDR_VEC_ALIGN (table));
906 	      max_alignment = align_flags::max (max_alignment, alignment);
907 	    }
908 	  LABEL_TO_ALIGNMENT (label) = max_alignment;
909 	  max_alignment = align_flags ();
910 	}
911       else if (BARRIER_P (insn))
912 	{
913 	  rtx_insn *label;
914 
915 	  for (label = insn; label && ! INSN_P (label);
916 	       label = NEXT_INSN (label))
917 	    if (LABEL_P (label))
918 	      {
919 		align_flags alignment
920 		  = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn));
921 		max_alignment = align_flags::max (max_alignment, alignment);
922 		break;
923 	      }
924 	}
925     }
926   if (!HAVE_ATTR_length)
927     return;
928 
929   /* Allocate the rest of the arrays.  */
930   insn_lengths = XNEWVEC (int, max_uid);
931   insn_lengths_max_uid = max_uid;
932   /* Syntax errors can lead to labels being outside of the main insn stream.
933      Initialize insn_addresses, so that we get reproducible results.  */
934   INSN_ADDRESSES_ALLOC (max_uid);
935 
936   varying_length = XCNEWVEC (char, max_uid);
937 
938   /* Initialize uid_align.  We scan instructions
939      from end to start, and keep in align_tab[n] the last seen insn
940      that does an alignment of at least n+1, i.e. the successor
941      in the alignment chain for an insn that does / has a known
942      alignment of n.  */
943   uid_align = XCNEWVEC (rtx, max_uid);
944 
945   for (i = MAX_CODE_ALIGN + 1; --i >= 0;)
946     align_tab[i] = NULL_RTX;
947   seq = get_last_insn ();
948   for (; seq; seq = PREV_INSN (seq))
949     {
950       int uid = INSN_UID (seq);
951       int log;
952       log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq).levels[0].log : 0);
953       uid_align[uid] = align_tab[0];
954       if (log)
955 	{
956 	  /* Found an alignment label.  */
957 	  gcc_checking_assert (log < MAX_CODE_ALIGN + 1);
958 	  uid_align[uid] = align_tab[log];
959 	  for (i = log - 1; i >= 0; i--)
960 	    align_tab[i] = seq;
961 	}
962     }
963 
964   /* When optimizing, we start assuming minimum length, and keep increasing
965      lengths as we find the need for this, till nothing changes.
966      When not optimizing, we start assuming maximum lengths, and
967      do a single pass to update the lengths.  */
968   bool increasing = optimize != 0;
969 
970 #ifdef CASE_VECTOR_SHORTEN_MODE
971   if (optimize)
972     {
973       /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
974          label fields.  */
975 
976       int min_shuid = INSN_SHUID (get_insns ()) - 1;
977       int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
978       int rel;
979 
980       for (insn = first; insn != 0; insn = NEXT_INSN (insn))
981 	{
982 	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
983 	  int len, i, min, max, insn_shuid;
984 	  int min_align;
985 	  addr_diff_vec_flags flags;
986 
987 	  if (! JUMP_TABLE_DATA_P (insn)
988 	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
989 	    continue;
990 	  pat = PATTERN (insn);
991 	  len = XVECLEN (pat, 1);
992 	  gcc_assert (len > 0);
993 	  min_align = MAX_CODE_ALIGN;
994 	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
995 	    {
996 	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
997 	      int shuid = INSN_SHUID (lab);
998 	      if (shuid < min)
999 		{
1000 		  min = shuid;
1001 		  min_lab = lab;
1002 		}
1003 	      if (shuid > max)
1004 		{
1005 		  max = shuid;
1006 		  max_lab = lab;
1007 		}
1008 
1009 	      int label_alignment = LABEL_TO_ALIGNMENT (lab).levels[0].log;
1010 	      if (min_align > label_alignment)
1011 		min_align = label_alignment;
1012 	    }
1013 	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1014 	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1015 	  insn_shuid = INSN_SHUID (insn);
1016 	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1017 	  memset (&flags, 0, sizeof (flags));
1018 	  flags.min_align = min_align;
1019 	  flags.base_after_vec = rel > insn_shuid;
1020 	  flags.min_after_vec  = min > insn_shuid;
1021 	  flags.max_after_vec  = max > insn_shuid;
1022 	  flags.min_after_base = min > rel;
1023 	  flags.max_after_base = max > rel;
1024 	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
1025 
1026 	  if (increasing)
1027 	    PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1028 	}
1029     }
1030 #endif /* CASE_VECTOR_SHORTEN_MODE */
1031 
1032   /* Compute initial lengths, addresses, and varying flags for each insn.  */
1033   int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1034 
1035   for (insn_current_address = 0, insn = first;
1036        insn != 0;
1037        insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1038     {
1039       uid = INSN_UID (insn);
1040 
1041       insn_lengths[uid] = 0;
1042 
1043       if (LABEL_P (insn))
1044 	{
1045 	  int log = LABEL_TO_ALIGNMENT (insn).levels[0].log;
1046 	  if (log)
1047 	    {
1048 	      int align = 1 << log;
1049 	      int new_address = (insn_current_address + align - 1) & -align;
1050 	      insn_lengths[uid] = new_address - insn_current_address;
1051 	    }
1052 	}
1053 
1054       INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1055 
1056       if (NOTE_P (insn) || BARRIER_P (insn)
1057 	  || LABEL_P (insn) || DEBUG_INSN_P (insn))
1058 	continue;
1059       if (insn->deleted ())
1060 	continue;
1061 
1062       body = PATTERN (insn);
1063       if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1064 	{
1065 	  /* This only takes room if read-only data goes into the text
1066 	     section.  */
1067 	  if (JUMP_TABLES_IN_TEXT_SECTION
1068 	      || readonly_data_section == text_section)
1069 	    insn_lengths[uid] = (XVECLEN (body,
1070 					  GET_CODE (body) == ADDR_DIFF_VEC)
1071 				 * GET_MODE_SIZE (table->get_data_mode ()));
1072 	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
1073 	}
1074       else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1075 	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1076       else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1077 	{
1078 	  int i;
1079 	  int const_delay_slots;
1080 	  if (DELAY_SLOTS)
1081 	    const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1082 	  else
1083 	    const_delay_slots = 0;
1084 
1085 	  int (*inner_length_fun) (rtx_insn *)
1086 	    = const_delay_slots ? length_fun : insn_default_length;
1087 	  /* Inside a delay slot sequence, we do not do any branch shortening
1088 	     if the shortening could change the number of delay slots
1089 	     of the branch.  */
1090 	  for (i = 0; i < body_seq->len (); i++)
1091 	    {
1092 	      rtx_insn *inner_insn = body_seq->insn (i);
1093 	      int inner_uid = INSN_UID (inner_insn);
1094 	      int inner_length;
1095 
1096 	      if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1097 		  || asm_noperands (PATTERN (inner_insn)) >= 0)
1098 		inner_length = (asm_insn_count (PATTERN (inner_insn))
1099 				* insn_default_length (inner_insn));
1100 	      else
1101 		inner_length = inner_length_fun (inner_insn);
1102 
1103 	      insn_lengths[inner_uid] = inner_length;
1104 	      if (const_delay_slots)
1105 		{
1106 		  if ((varying_length[inner_uid]
1107 		       = insn_variable_length_p (inner_insn)) != 0)
1108 		    varying_length[uid] = 1;
1109 		  INSN_ADDRESSES (inner_uid) = (insn_current_address
1110 						+ insn_lengths[uid]);
1111 		}
1112 	      else
1113 		varying_length[inner_uid] = 0;
1114 	      insn_lengths[uid] += inner_length;
1115 	    }
1116 	}
1117       else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1118 	{
1119 	  insn_lengths[uid] = length_fun (insn);
1120 	  varying_length[uid] = insn_variable_length_p (insn);
1121 	}
1122 
1123       /* If needed, do any adjustment.  */
1124 #ifdef ADJUST_INSN_LENGTH
1125       ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1126       if (insn_lengths[uid] < 0)
1127 	fatal_insn ("negative insn length", insn);
1128 #endif
1129     }
1130 
1131   /* Now loop over all the insns finding varying length insns.  For each,
1132      get the current insn length.  If it has changed, reflect the change.
1133      When nothing changes for a full pass, we are done.  */
1134 
1135   while (something_changed)
1136     {
1137       something_changed = 0;
1138       insn_current_align = MAX_CODE_ALIGN - 1;
1139       for (insn_current_address = 0, insn = first;
1140 	   insn != 0;
1141 	   insn = NEXT_INSN (insn))
1142 	{
1143 	  int new_length;
1144 #ifdef ADJUST_INSN_LENGTH
1145 	  int tmp_length;
1146 #endif
1147 	  int length_align;
1148 
1149 	  uid = INSN_UID (insn);
1150 
1151 	  if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
1152 	    {
1153 	      int log = LABEL_TO_ALIGNMENT (label).levels[0].log;
1154 
1155 #ifdef CASE_VECTOR_SHORTEN_MODE
1156 	      /* If the mode of a following jump table was changed, we
1157 		 may need to update the alignment of this label.  */
1158 
1159 	      if (JUMP_TABLES_IN_TEXT_SECTION
1160 		  || readonly_data_section == text_section)
1161 		{
1162 		  rtx_jump_table_data *table = jump_table_for_label (label);
1163 		  if (table)
1164 		    {
1165 		      int newlog = ADDR_VEC_ALIGN (table);
1166 		      if (newlog != log)
1167 			{
1168 			  log = newlog;
1169 			  LABEL_TO_ALIGNMENT (insn) = log;
1170 			  something_changed = 1;
1171 			}
1172 		    }
1173 		}
1174 #endif
1175 
1176 	      if (log > insn_current_align)
1177 		{
1178 		  int align = 1 << log;
1179 		  int new_address= (insn_current_address + align - 1) & -align;
1180 		  insn_lengths[uid] = new_address - insn_current_address;
1181 		  insn_current_align = log;
1182 		  insn_current_address = new_address;
1183 		}
1184 	      else
1185 		insn_lengths[uid] = 0;
1186 	      INSN_ADDRESSES (uid) = insn_current_address;
1187 	      continue;
1188 	    }
1189 
1190 	  length_align = INSN_LENGTH_ALIGNMENT (insn);
1191 	  if (length_align < insn_current_align)
1192 	    insn_current_align = length_align;
1193 
1194 	  insn_last_address = INSN_ADDRESSES (uid);
1195 	  INSN_ADDRESSES (uid) = insn_current_address;
1196 
1197 #ifdef CASE_VECTOR_SHORTEN_MODE
1198 	  if (optimize
1199 	      && JUMP_TABLE_DATA_P (insn)
1200 	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1201 	    {
1202 	      rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn);
1203 	      rtx body = PATTERN (insn);
1204 	      int old_length = insn_lengths[uid];
1205 	      rtx_insn *rel_lab =
1206 		safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1207 	      rtx min_lab = XEXP (XEXP (body, 2), 0);
1208 	      rtx max_lab = XEXP (XEXP (body, 3), 0);
1209 	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1210 	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1211 	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1212 	      rtx_insn *prev;
1213 	      int rel_align = 0;
1214 	      addr_diff_vec_flags flags;
1215 	      scalar_int_mode vec_mode;
1216 
1217 	      /* Avoid automatic aggregate initialization.  */
1218 	      flags = ADDR_DIFF_VEC_FLAGS (body);
1219 
1220 	      /* Try to find a known alignment for rel_lab.  */
1221 	      for (prev = rel_lab;
1222 		   prev
1223 		   && ! insn_lengths[INSN_UID (prev)]
1224 		   && ! (varying_length[INSN_UID (prev)] & 1);
1225 		   prev = PREV_INSN (prev))
1226 		if (varying_length[INSN_UID (prev)] & 2)
1227 		  {
1228 		    rel_align = LABEL_TO_ALIGNMENT (prev).levels[0].log;
1229 		    break;
1230 		  }
1231 
1232 	      /* See the comment on addr_diff_vec_flags in rtl.h for the
1233 		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
1234 	      /* Anything after INSN has still addresses from the last
1235 		 pass; adjust these so that they reflect our current
1236 		 estimate for this pass.  */
1237 	      if (flags.base_after_vec)
1238 		rel_addr += insn_current_address - insn_last_address;
1239 	      if (flags.min_after_vec)
1240 		min_addr += insn_current_address - insn_last_address;
1241 	      if (flags.max_after_vec)
1242 		max_addr += insn_current_address - insn_last_address;
1243 	      /* We want to know the worst case, i.e. lowest possible value
1244 		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
1245 		 its offset is positive, and we have to be wary of code shrink;
1246 		 otherwise, it is negative, and we have to be wary of code
1247 		 size increase.  */
1248 	      if (flags.min_after_base)
1249 		{
1250 		  /* If INSN is between REL_LAB and MIN_LAB, the size
1251 		     changes we are about to make can change the alignment
1252 		     within the observed offset, therefore we have to break
1253 		     it up into two parts that are independent.  */
1254 		  if (! flags.base_after_vec && flags.min_after_vec)
1255 		    {
1256 		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1257 		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
1258 		    }
1259 		  else
1260 		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1261 		}
1262 	      else
1263 		{
1264 		  if (flags.base_after_vec && ! flags.min_after_vec)
1265 		    {
1266 		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1267 		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1268 		    }
1269 		  else
1270 		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1271 		}
1272 	      /* Likewise, determine the worst case, i.e. highest possible value
1273 		 for the offset of MAX_LAB.  */
1274 	      if (flags.max_after_base)
1275 		{
1276 		  if (! flags.base_after_vec && flags.max_after_vec)
1277 		    {
1278 		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1279 		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
1280 		    }
1281 		  else
1282 		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1283 		}
1284 	      else
1285 		{
1286 		  if (flags.base_after_vec && ! flags.max_after_vec)
1287 		    {
1288 		      max_addr += align_fuzz (max_lab, insn, 0, 0);
1289 		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
1290 		    }
1291 		  else
1292 		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1293 		}
1294 	      vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1295 						   max_addr - rel_addr, body);
1296 	      if (!increasing
1297 		  || (GET_MODE_SIZE (vec_mode)
1298 		      >= GET_MODE_SIZE (table->get_data_mode ())))
1299 		PUT_MODE (body, vec_mode);
1300 	      if (JUMP_TABLES_IN_TEXT_SECTION
1301 		  || readonly_data_section == text_section)
1302 		{
1303 		  insn_lengths[uid]
1304 		    = (XVECLEN (body, 1)
1305 		       * GET_MODE_SIZE (table->get_data_mode ()));
1306 		  insn_current_address += insn_lengths[uid];
1307 		  if (insn_lengths[uid] != old_length)
1308 		    something_changed = 1;
1309 		}
1310 
1311 	      continue;
1312 	    }
1313 #endif /* CASE_VECTOR_SHORTEN_MODE */
1314 
1315 	  if (! (varying_length[uid]))
1316 	    {
1317 	      if (NONJUMP_INSN_P (insn)
1318 		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
1319 		{
1320 		  int i;
1321 
1322 		  body = PATTERN (insn);
1323 		  for (i = 0; i < XVECLEN (body, 0); i++)
1324 		    {
1325 		      rtx inner_insn = XVECEXP (body, 0, i);
1326 		      int inner_uid = INSN_UID (inner_insn);
1327 
1328 		      INSN_ADDRESSES (inner_uid) = insn_current_address;
1329 
1330 		      insn_current_address += insn_lengths[inner_uid];
1331 		    }
1332 		}
1333 	      else
1334 		insn_current_address += insn_lengths[uid];
1335 
1336 	      continue;
1337 	    }
1338 
1339 	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1340 	    {
1341 	      rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1342 	      int i;
1343 
1344 	      body = PATTERN (insn);
1345 	      new_length = 0;
1346 	      for (i = 0; i < seqn->len (); i++)
1347 		{
1348 		  rtx_insn *inner_insn = seqn->insn (i);
1349 		  int inner_uid = INSN_UID (inner_insn);
1350 		  int inner_length;
1351 
1352 		  INSN_ADDRESSES (inner_uid) = insn_current_address;
1353 
1354 		  /* insn_current_length returns 0 for insns with a
1355 		     non-varying length.  */
1356 		  if (! varying_length[inner_uid])
1357 		    inner_length = insn_lengths[inner_uid];
1358 		  else
1359 		    inner_length = insn_current_length (inner_insn);
1360 
1361 		  if (inner_length != insn_lengths[inner_uid])
1362 		    {
1363 		      if (!increasing || inner_length > insn_lengths[inner_uid])
1364 			{
1365 			  insn_lengths[inner_uid] = inner_length;
1366 			  something_changed = 1;
1367 			}
1368 		      else
1369 			inner_length = insn_lengths[inner_uid];
1370 		    }
1371 		  insn_current_address += inner_length;
1372 		  new_length += inner_length;
1373 		}
1374 	    }
1375 	  else
1376 	    {
1377 	      new_length = insn_current_length (insn);
1378 	      insn_current_address += new_length;
1379 	    }
1380 
1381 #ifdef ADJUST_INSN_LENGTH
1382 	  /* If needed, do any adjustment.  */
1383 	  tmp_length = new_length;
1384 	  ADJUST_INSN_LENGTH (insn, new_length);
1385 	  insn_current_address += (new_length - tmp_length);
1386 #endif
1387 
1388 	  if (new_length != insn_lengths[uid]
1389 	      && (!increasing || new_length > insn_lengths[uid]))
1390 	    {
1391 	      insn_lengths[uid] = new_length;
1392 	      something_changed = 1;
1393 	    }
1394 	  else
1395 	    insn_current_address += insn_lengths[uid] - new_length;
1396 	}
1397       /* For a non-optimizing compile, do only a single pass.  */
1398       if (!increasing)
1399 	break;
1400     }
1401   crtl->max_insn_address = insn_current_address;
1402   free (varying_length);
1403 }
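
/* Worked example of the fixed-point iteration above, with made-up numbers:
   suppose a conditional branch has a 2-byte form reaching +/-128 bytes and
   a 6-byte form otherwise.  With optimization on, the first pass records
   the 2-byte minimum length.  If other insns then grow so that the target
   ends up 140 bytes away, insn_current_length reports 6 on a later
   iteration and the recorded length is bumped; since lengths only ever
   increase in this mode, the loop is guaranteed to terminate.  */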
1404 
1405 /* Given the body of an INSN known to be generated by an ASM statement, return
1406    the number of machine instructions likely to be generated for this insn.
1407    This is used to compute its length.  */
1408 
1409 static int
1410 asm_insn_count (rtx body)
1411 {
1412   const char *templ;
1413 
1414   if (GET_CODE (body) == ASM_INPUT)
1415     templ = XSTR (body, 0);
1416   else
1417     templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1418 
1419   return asm_str_count (templ);
1420 }
1421 
1422 /* Return the number of machine instructions likely to be generated for the
1423    inline-asm template. */
1424 int
1425 asm_str_count (const char *templ)
1426 {
1427   int count = 1;
1428 
1429   if (!*templ)
1430     return 0;
1431 
1432   for (; *templ; templ++)
1433     if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1434 	|| *templ == '\n')
1435       count++;
1436 
1437   return count;
1438 }
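
/* For example, asm_str_count ("mov r0, r1\n\tadd r2, r3") returns 2 with
   the default IS_ASM_LOGICAL_LINE_SEPARATOR, since the embedded newline
   starts a second instruction, while an empty template returns 0.  */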
1439 
1440 /* Return true if DWARF2 debug info can be emitted for DECL.  */
1441 
1442 static bool
1443 dwarf2_debug_info_emitted_p (tree decl)
1444 {
1445   if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1446     return false;
1447 
1448   if (DECL_IGNORED_P (decl))
1449     return false;
1450 
1451   return true;
1452 }
1453 
1454 /* Return scope resulting from combination of S1 and S2.  */
1455 static tree
1456 choose_inner_scope (tree s1, tree s2)
1457 {
1458    if (!s1)
1459      return s2;
1460    if (!s2)
1461      return s1;
1462    if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1463      return s1;
1464    return s2;
1465 }
1466 
1467 /* Emit lexical block notes needed to change scope from S1 to S2.  */
1468 
1469 static void
1470 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1471 {
1472   rtx_insn *insn = orig_insn;
1473   tree com = NULL_TREE;
1474   tree ts1 = s1, ts2 = s2;
1475   tree s;
1476 
1477   while (ts1 != ts2)
1478     {
1479       gcc_assert (ts1 && ts2);
1480       if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1481 	ts1 = BLOCK_SUPERCONTEXT (ts1);
1482       else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1483 	ts2 = BLOCK_SUPERCONTEXT (ts2);
1484       else
1485 	{
1486 	  ts1 = BLOCK_SUPERCONTEXT (ts1);
1487 	  ts2 = BLOCK_SUPERCONTEXT (ts2);
1488 	}
1489     }
1490   com = ts1;
1491 
1492   /* Close scopes.  */
1493   s = s1;
1494   while (s != com)
1495     {
1496       rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1497       NOTE_BLOCK (note) = s;
1498       s = BLOCK_SUPERCONTEXT (s);
1499     }
1500 
1501   /* Open scopes.  */
1502   s = s2;
1503   while (s != com)
1504     {
1505       insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1506       NOTE_BLOCK (insn) = s;
1507       s = BLOCK_SUPERCONTEXT (s);
1508     }
1509 }
1510 
1511 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1512    on the scope tree and the newly reordered instructions.  */
1513 
1514 static void
1515 reemit_insn_block_notes (void)
1516 {
1517   tree cur_block = DECL_INITIAL (cfun->decl);
1518   rtx_insn *insn;
1519 
1520   insn = get_insns ();
1521   for (; insn; insn = NEXT_INSN (insn))
1522     {
1523       tree this_block;
1524 
1525       /* Prevent lexical blocks from straddling section boundaries.  */
1526       if (NOTE_P (insn))
1527 	switch (NOTE_KIND (insn))
1528 	  {
1529 	  case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1530 	    {
1531 	      for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1532 		   s = BLOCK_SUPERCONTEXT (s))
1533 		{
1534 		  rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1535 		  NOTE_BLOCK (note) = s;
1536 		  note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1537 		  NOTE_BLOCK (note) = s;
1538 		}
1539 	    }
1540 	    break;
1541 
1542 	  case NOTE_INSN_BEGIN_STMT:
1543 	  case NOTE_INSN_INLINE_ENTRY:
1544 	    this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn));
1545 	    goto set_cur_block_to_this_block;
1546 
1547 	  default:
1548 	    continue;
1549 	}
1550 
1551       if (!active_insn_p (insn))
1552         continue;
1553 
1554       /* Avoid putting scope notes between jump table and its label.  */
1555       if (JUMP_TABLE_DATA_P (insn))
1556 	continue;
1557 
1558       this_block = insn_scope (insn);
1559       /* For sequences compute scope resulting from merging all scopes
1560 	 of instructions nested inside.  */
1561       if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1562 	{
1563 	  int i;
1564 
1565 	  this_block = NULL;
1566 	  for (i = 0; i < body->len (); i++)
1567 	    this_block = choose_inner_scope (this_block,
1568 					     insn_scope (body->insn (i)));
1569 	}
1570     set_cur_block_to_this_block:
1571       if (! this_block)
1572 	{
1573 	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1574 	    continue;
1575 	  else
1576 	    this_block = DECL_INITIAL (cfun->decl);
1577 	}
1578 
1579       if (this_block != cur_block)
1580 	{
1581 	  change_scope (insn, cur_block, this_block);
1582 	  cur_block = this_block;
1583 	}
1584     }
1585 
1586   /* change_scope emits before the insn, not after.  */
1587   rtx_note *note = emit_note (NOTE_INSN_DELETED);
1588   change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1589   delete_insn (note);
1590 
1591   reorder_blocks ();
1592 }
1593 
1594 static const char *some_local_dynamic_name;
1595 
1596 /* Locate some local-dynamic symbol still in use by this function
1597    so that we can print its name in local-dynamic base patterns.
1598    Return null if there are no local-dynamic references.  */
1599 
1600 const char *
1601 get_some_local_dynamic_name ()
1602 {
1603   subrtx_iterator::array_type array;
1604   rtx_insn *insn;
1605 
1606   if (some_local_dynamic_name)
1607     return some_local_dynamic_name;
1608 
1609   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1610     if (NONDEBUG_INSN_P (insn))
1611       FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1612 	{
1613 	  const_rtx x = *iter;
1614 	  if (GET_CODE (x) == SYMBOL_REF)
1615 	    {
1616 	      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1617 		return some_local_dynamic_name = XSTR (x, 0);
1618 	      if (CONSTANT_POOL_ADDRESS_P (x))
1619 		iter.substitute (get_pool_constant (x));
1620 	    }
1621 	}
1622 
1623   return 0;
1624 }
1625 
1626 /* Arrange for us to emit a source location note before any further
1627    real insns or section changes, by setting the SEEN_NEXT_VIEW bit in
1628    *SEEN, as long as we are keeping track of location views.  The bit
1629    indicates we have referenced the next view at the current PC, so we
1630    have to emit it.  This should be called next to the var_location
1631    debug hook.  */
1632 
1633 static inline void
1634 set_next_view_needed (int *seen)
1635 {
1636   if (debug_variable_location_views)
1637     *seen |= SEEN_NEXT_VIEW;
1638 }
1639 
1640 /* Clear the flag in *SEEN indicating we need to emit the next view.
1641    This should be called next to the source_line debug hook.  */
1642 
1643 static inline void
1644 clear_next_view_needed (int *seen)
1645 {
1646   *seen &= ~SEEN_NEXT_VIEW;
1647 }
1648 
1649 /* Test whether we have a pending request to emit the next view in
1650    *SEEN, and emit it if needed, clearing the request bit.  */
1651 
1652 static inline void
1653 maybe_output_next_view (int *seen)
1654 {
1655   if ((*seen & SEEN_NEXT_VIEW) != 0)
1656     {
1657       clear_next_view_needed (seen);
1658       (*debug_hooks->source_line) (last_linenum, last_columnnum,
1659 				   last_filename, last_discriminator,
1660 				   false);
1661     }
1662 }
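
/* Illustrative flow for the three helpers above (a sketch, not code
   taken from an actual caller): after a variable location has been
   referenced, and before the next real insn is emitted, one would do

       int seen = 0;
       ...
       set_next_view_needed (&seen);
       ...
       maybe_output_next_view (&seen);

   so that at most one extra source_line call is emitted per
   referenced view, and none at all when location views are off.  */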
1663 
1664 /* We want to emit param bindings (before the first begin_stmt) in the
1665    initial view, if we are emitting views.  To that end, we may
1666    consume initial notes in the function, processing them in
1667    final_start_function, before signaling the beginning of the
1668    prologue, rather than in final.
1669 
1670    We don't test whether the DECLs are PARM_DECLs: the assumption is
1671    that there will be a NOTE_INSN_BEGIN_STMT marker before any
1672    non-parameter NOTE_INSN_VAR_LOCATION.  It's OK if the marker is not
1673    there; we'll just have more variable locations bound in the initial
1674    view, which is consistent with their being bound without any code
1675    that would give them a value.  */
1676 
1677 static inline bool
1678 in_initial_view_p (rtx_insn *insn)
1679 {
1680   return (!DECL_IGNORED_P (current_function_decl)
1681 	  && debug_variable_location_views
1682 	  && insn && GET_CODE (insn) == NOTE
1683 	  && (NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
1684 	      || NOTE_KIND (insn) == NOTE_INSN_DELETED));
1685 }
1686 
1687 /* Output assembler code for the start of a function,
1688    and initialize some of the variables in this file
1689    for the new function.  The label for the function and associated
1690    assembler pseudo-ops have already been output in `assemble_start_function'.
1691 
1692    FIRST is the first insn of the rtl for the function being compiled.
1693    FILE is the file to write assembler code to.
1694    SEEN should be initially set to zero, and it may be updated to
1695    indicate we have references to the next location view, which would
1696    require us to emit it at the current PC.
1697    OPTIMIZE_P is nonzero if we should eliminate redundant
1698      test and compare insns.  */
1699 
1700 static void
1701 final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen,
1702 			int optimize_p ATTRIBUTE_UNUSED)
1703 {
1704   block_depth = 0;
1705 
1706   this_is_asm_operands = 0;
1707 
1708   need_profile_function = false;
1709 
1710   last_filename = LOCATION_FILE (prologue_location);
1711   last_linenum = LOCATION_LINE (prologue_location);
1712   last_columnnum = LOCATION_COLUMN (prologue_location);
1713   last_discriminator = discriminator = 0;
1714   last_bb_discriminator = bb_discriminator = 0;
1715 
1716   high_block_linenum = high_function_linenum = last_linenum;
1717 
1718   if (flag_sanitize & SANITIZE_ADDRESS)
1719     asan_function_start ();
1720 
1721   rtx_insn *first = *firstp;
1722   if (in_initial_view_p (first))
1723     {
1724       do
1725 	{
1726 	  final_scan_insn (first, file, 0, 0, seen);
1727 	  first = NEXT_INSN (first);
1728 	}
1729       while (in_initial_view_p (first));
1730       *firstp = first;
1731     }
1732 
1733   if (!DECL_IGNORED_P (current_function_decl))
1734     debug_hooks->begin_prologue (last_linenum, last_columnnum,
1735 				 last_filename);
1736 
1737   if (!dwarf2_debug_info_emitted_p (current_function_decl))
1738     dwarf2out_begin_prologue (0, 0, NULL);
1739 
1740 #ifdef LEAF_REG_REMAP
1741   if (crtl->uses_only_leaf_regs)
1742     leaf_renumber_regs (first);
1743 #endif
1744 
1745   /* The Sun386i and perhaps other machines don't work right
1746      if the profiling code comes after the prologue.  */
1747   if (targetm.profile_before_prologue () && crtl->profile)
1748     {
1749       if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1750 	  && targetm.have_prologue ())
1751 	{
1752 	  rtx_insn *insn;
1753 	  for (insn = first; insn; insn = NEXT_INSN (insn))
1754 	    if (!NOTE_P (insn))
1755 	      {
1756 		insn = NULL;
1757 		break;
1758 	      }
1759 	    else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1760 		     || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1761 	      break;
1762 	    else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1763 		     || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1764 	      continue;
1765 	    else
1766 	      {
1767 		insn = NULL;
1768 		break;
1769 	      }
1770 
1771 	  if (insn)
1772 	    need_profile_function = true;
1773 	  else
1774 	    profile_function (file);
1775 	}
1776       else
1777 	profile_function (file);
1778     }
1779 
1780   /* If debugging, assign block numbers to all of the blocks in this
1781      function.  */
1782   if (write_symbols)
1783     {
1784       reemit_insn_block_notes ();
1785       number_blocks (current_function_decl);
1786       /* We never actually put out begin/end notes for the top-level
1787 	 block in the function.  But, conceptually, that block is
1788 	 always needed.  */
1789       TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1790     }
1791 
1792   unsigned HOST_WIDE_INT min_frame_size
1793     = constant_lower_bound (get_frame_size ());
1794   if (min_frame_size > (unsigned HOST_WIDE_INT) warn_frame_larger_than_size)
1795     {
1796       /* Issue a warning.  */
1797       warning (OPT_Wframe_larger_than_,
1798 	       "the frame size of %wu bytes is larger than %wu bytes",
1799 	       min_frame_size, warn_frame_larger_than_size);
1800     }
1801 
1802   /* First output the function prologue: code to set up the stack frame.  */
1803   targetm.asm_out.function_prologue (file);
1804 
1805   /* If the machine represents the prologue as RTL, the profiling code must
1806      be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
1807   if (! targetm.have_prologue ())
1808     profile_after_prologue (file);
1809 }
1810 
1811 /* This is an exported final_start_function_1, callable without SEEN.  */
1812 
1813 void
1814 final_start_function (rtx_insn *first, FILE *file,
1815 		      int optimize_p ATTRIBUTE_UNUSED)
1816 {
1817   int seen = 0;
1818   final_start_function_1 (&first, file, &seen, optimize_p);
1819   gcc_assert (seen == 0);
1820 }
1821 
1822 static void
1823 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1824 {
1825   if (!targetm.profile_before_prologue () && crtl->profile)
1826     profile_function (file);
1827 }
1828 
1829 static void
1830 profile_function (FILE *file ATTRIBUTE_UNUSED)
1831 {
1832 #ifndef NO_PROFILE_COUNTERS
1833 # define NO_PROFILE_COUNTERS	0
1834 #endif
1835 #ifdef ASM_OUTPUT_REG_PUSH
1836   rtx sval = NULL, chain = NULL;
1837 
1838   if (cfun->returns_struct)
1839     sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1840 					   true);
1841   if (cfun->static_chain_decl)
1842     chain = targetm.calls.static_chain (current_function_decl, true);
1843 #endif /* ASM_OUTPUT_REG_PUSH */
1844 
1845   if (! NO_PROFILE_COUNTERS)
1846     {
1847       int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1848       switch_to_section (data_section);
1849       ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1850       targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1851       assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1852     }
1853 
1854   switch_to_section (current_function_section ());
1855 
1856 #ifdef ASM_OUTPUT_REG_PUSH
1857   if (sval && REG_P (sval))
1858     ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1859   if (chain && REG_P (chain))
1860     ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1861 #endif
1862 
1863   FUNCTION_PROFILER (file, current_function_funcdef_no);
1864 
1865 #ifdef ASM_OUTPUT_REG_PUSH
1866   if (chain && REG_P (chain))
1867     ASM_OUTPUT_REG_POP (file, REGNO (chain));
1868   if (sval && REG_P (sval))
1869     ASM_OUTPUT_REG_POP (file, REGNO (sval));
1870 #endif
1871 }
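
/* For illustration only: on a typical ELF target, FUNCTION_PROFILER
   expands to something along the lines of

       fprintf (FILE, "\tcall\tmcount\n");

   and the "LP" label emitted above provides a per-function counter
   word for the profiling runtime.  The exact directives are entirely
   target-defined; this is a hypothetical example, not a requirement.  */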
1872 
1873 /* Output assembler code for the end of a function.  This takes no
1874    arguments; it relies on the per-function state set up by
1875    `final_start_function' and updated during `final'.  */
1876 
1877 void
1878 final_end_function (void)
1879 {
1880   app_disable ();
1881 
1882   if (!DECL_IGNORED_P (current_function_decl))
1883     debug_hooks->end_function (high_function_linenum);
1884 
1885   /* Finally, output the function epilogue:
1886      code to restore the stack frame and return to the caller.  */
1887   targetm.asm_out.function_epilogue (asm_out_file);
1888 
1889   /* And debug output.  */
1890   if (!DECL_IGNORED_P (current_function_decl))
1891     debug_hooks->end_epilogue (last_linenum, last_filename);
1892 
1893   if (!dwarf2_debug_info_emitted_p (current_function_decl)
1894       && dwarf2out_do_frame ())
1895     dwarf2out_end_epilogue (last_linenum, last_filename);
1896 
1897   some_local_dynamic_name = 0;
1898 }
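
/* A minimal sketch of how a caller drives the routines above for one
   function (assumptions: the insn stream is finalized and
   shorten_branches has already run; see rest_of_handle_final for the
   real sequence):

       rtx_insn *first = get_insns ();
       final_start_function (first, asm_out_file, optimize);
       final (first, asm_out_file, optimize);
       final_end_function ();

   The real driver also brackets this with assemble_start_function /
   assemble_end_function and emits the exception table.  */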
1899 
1900 
1901 /* Dumper helper for basic block information. FILE is the assembly
1902    output file, and INSN is the instruction being emitted.  */
1903 
1904 static void
1905 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1906                        basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1907 {
1908   basic_block bb;
1909 
1910   if (!flag_debug_asm)
1911     return;
1912 
1913   if (INSN_UID (insn) < bb_map_size
1914       && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1915     {
1916       edge e;
1917       edge_iterator ei;
1918 
1919       fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1920       if (bb->count.initialized_p ())
1921 	{
1922           fprintf (file, ", count:");
1923 	  bb->count.dump (file);
1924 	}
1925       fprintf (file, " seq:%d", (*bb_seqn)++);
1926       fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1927       FOR_EACH_EDGE (e, ei, bb->preds)
1928         {
1929           dump_edge_info (file, e, TDF_DETAILS, 0);
1930         }
1931       fprintf (file, "\n");
1932     }
1933   if (INSN_UID (insn) < bb_map_size
1934       && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1935     {
1936       edge e;
1937       edge_iterator ei;
1938 
1939       fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1940       FOR_EACH_EDGE (e, ei, bb->succs)
1941        {
1942          dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1943        }
1944       fprintf (file, "\n");
1945     }
1946 }
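
/* With -dA (flag_debug_asm) the routine above annotates the output
   with comments of roughly this shape, assuming '#' is
   ASM_COMMENT_START:

       # BLOCK 3, count:1000 seq:2
       # PRED: 2 (FALLTHRU)
       ...
       # SUCC: 4 5

   Only the general shape is implied here; the edge text comes from
   dump_edge_info and varies with the available profile data.  */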
1947 
1948 /* Output assembler code for some insns: all or part of a function.
1949    For description of args, see `final_start_function', above.  */
1950 
1951 static void
1952 final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p)
1953 {
1954   rtx_insn *insn, *next;
1955 
1956   /* Used for -dA dump.  */
1957   basic_block *start_to_bb = NULL;
1958   basic_block *end_to_bb = NULL;
1959   int bb_map_size = 0;
1960   int bb_seqn = 0;
1961 
1962   last_ignored_compare = 0;
1963 
1964   if (HAVE_cc0)
1965     for (insn = first; insn; insn = NEXT_INSN (insn))
1966       {
1967 	/* If CC tracking across branches is enabled, record the insn that
1968 	   jumps to each label reached from only one place.  */
1969 	if (optimize_p && JUMP_P (insn))
1970 	  {
1971 	    rtx lab = JUMP_LABEL (insn);
1972 	    if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
1973 	      {
1974 		LABEL_REFS (lab) = insn;
1975 	      }
1976 	  }
1977       }
1978 
1979   init_recog ();
1980 
1981   CC_STATUS_INIT;
1982 
1983   if (flag_debug_asm)
1984     {
1985       basic_block bb;
1986 
1987       bb_map_size = get_max_uid () + 1;
1988       start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1989       end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1990 
1991       /* There is no cfg for a thunk.  */
1992       if (!cfun->is_thunk)
1993 	FOR_EACH_BB_REVERSE_FN (bb, cfun)
1994 	  {
1995 	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1996 	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
1997 	  }
1998     }
1999 
2000   /* Output the insns.  */
2001   for (insn = first; insn;)
2002     {
2003       if (HAVE_ATTR_length)
2004 	{
2005 	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2006 	    {
2007 	      /* This can be triggered by bugs elsewhere in the compiler if
2008 		 new insns are created after init_insn_lengths is called.  */
2009 	      gcc_assert (NOTE_P (insn));
2010 	      insn_current_address = -1;
2011 	    }
2012 	  else
2013 	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2014 	  /* final can be seen as an iteration of shorten_branches that
2015 	     does nothing (since a fixed point has already been reached).  */
2016 	  insn_last_address = insn_current_address;
2017 	}
2018 
2019       dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2020                              bb_map_size, &bb_seqn);
2021       insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2022     }
2023 
2024   maybe_output_next_view (&seen);
2025 
2026   if (flag_debug_asm)
2027     {
2028       free (start_to_bb);
2029       free (end_to_bb);
2030     }
2031 
2032   /* Remove CFI notes, to avoid compare-debug failures.  */
2033   for (insn = first; insn; insn = next)
2034     {
2035       next = NEXT_INSN (insn);
2036       if (NOTE_P (insn)
2037 	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
2038 	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2039 	delete_insn (insn);
2040     }
2041 }
2042 
2043 /* This is an exported final_1, callable without SEEN.  */
2044 
2045 void
2046 final (rtx_insn *first, FILE *file, int optimize_p)
2047 {
2048   /* Those that use the internal final_start_function_1/final_1 API
2049      skip initial debug bind notes in final_start_function_1, and pass
2050      the modified FIRST to final_1.  But those that use the public
2051      final_start_function/final APIs, final_start_function can't move
2052      FIRST because it's not passed by reference, so if they were
2053      skipped there, skip them again here.  */
2054   while (in_initial_view_p (first))
2055     first = NEXT_INSN (first);
2056 
2057   final_1 (first, file, 0, optimize_p);
2058 }
2059 
2060 const char *
2061 get_insn_template (int code, rtx_insn *insn)
2062 {
2063   switch (insn_data[code].output_format)
2064     {
2065     case INSN_OUTPUT_FORMAT_SINGLE:
2066       return insn_data[code].output.single;
2067     case INSN_OUTPUT_FORMAT_MULTI:
2068       return insn_data[code].output.multi[which_alternative];
2069     case INSN_OUTPUT_FORMAT_FUNCTION:
2070       gcc_assert (insn);
2071       return (*insn_data[code].output.function) (recog_data.operand, insn);
2072 
2073     default:
2074       gcc_unreachable ();
2075     }
2076 }
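
/* The three output formats above mirror how an insn's output template
   is written in the machine description.  Hedged, generic examples
   (not taken from any particular port):

       "mov %1,%0"                       single constant template
       "@
	add %2,%0
	sub %0,%2,%0"                     one template per alternative
       { return ...; }                   C fragment computing the string

   genoutput translates each form into the insn_data entry consulted
   here, with which_alternative selecting among the MULTI strings.  */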
2077 
2078 /* Emit the appropriate declaration for an alternate-entry-point
2079    symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
2080    LABEL_KIND != LABEL_NORMAL.
2081 
2082    The case fall-through in this function is intentional.  */
2083 static void
2084 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2085 {
2086   const char *name = LABEL_NAME (insn);
2087 
2088   switch (LABEL_KIND (insn))
2089     {
2090     case LABEL_WEAK_ENTRY:
2091 #ifdef ASM_WEAKEN_LABEL
2092       ASM_WEAKEN_LABEL (file, name);
2093       gcc_fallthrough ();
2094 #endif
2095     case LABEL_GLOBAL_ENTRY:
2096       targetm.asm_out.globalize_label (file, name);
2097       gcc_fallthrough ();
2098     case LABEL_STATIC_ENTRY:
2099 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2100       ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2101 #endif
2102       ASM_OUTPUT_LABEL (file, name);
2103       break;
2104 
2105     case LABEL_NORMAL:
2106     default:
2107       gcc_unreachable ();
2108     }
2109 }
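
/* For illustration, a LABEL_GLOBAL_ENTRY named "foo" on an ELF/GAS
   target would typically come out as (hedged; the exact directives
   depend on the macros used above):

       .globl	foo
       .type	foo, @function
   foo:

   A LABEL_WEAK_ENTRY additionally weakens the symbol first, and a
   LABEL_STATIC_ENTRY skips the globalization, thanks to the
   intentional fall-through between the cases.  */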
2110 
2111 /* Given a CALL_INSN, find and return the nested CALL. */
2112 static rtx
2113 call_from_call_insn (rtx_call_insn *insn)
2114 {
2115   rtx x;
2116   gcc_assert (CALL_P (insn));
2117   x = PATTERN (insn);
2118 
2119   while (GET_CODE (x) != CALL)
2120     {
2121       switch (GET_CODE (x))
2122 	{
2123 	default:
2124 	  gcc_unreachable ();
2125 	case COND_EXEC:
2126 	  x = COND_EXEC_CODE (x);
2127 	  break;
2128 	case PARALLEL:
2129 	  x = XVECEXP (x, 0, 0);
2130 	  break;
2131 	case SET:
2132 	  x = XEXP (x, 1);
2133 	  break;
2134 	}
2135     }
2136   return x;
2137 }
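
/* The loop above unwraps the usual shapes of a call pattern.  Some
   schematic examples (hedged, simplified RTL):

       (call (mem:QI (symbol_ref "foo")) (const_int 0))
       (set (reg:SI 0)
	    (call (mem:QI (symbol_ref "foo")) (const_int 0)))
       (parallel [(set (reg:SI 0) (call ...)) (clobber (reg:SI 1))])
       (cond_exec (ne (reg:CC 17) (const_int 0)) (call ...))

   In each case the innermost CALL rtx is what gets returned.  */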
2138 
2139 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2140    corresponding source line, if available.  */
2141 
2142 static void
2143 asm_show_source (const char *filename, int linenum)
2144 {
2145   if (!filename)
2146     return;
2147 
2148   char_span line = location_get_source_line (filename, linenum);
2149   if (!line)
2150     return;
2151 
2152   fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2153   /* "line" is not 0-terminated, so we must use its length.  */
2154   fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
2155   fputc ('\n', asm_out_file);
2156 }
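
/* With -fverbose-asm this produces comment lines such as (assuming
   '#' is ASM_COMMENT_START):

       # foo.c:42:   x = y + 1;

   i.e. the file name, line number and raw source text, which makes
   the generated assembly easier to read next to the source.  */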
2157 
2158 /* The final scan for one insn, INSN.
2159    Args are same as in `final', except that INSN
2160    is the insn being scanned.
2161    Value returned is the next insn to be scanned.
2162 
2163    NOPEEPHOLES is the flag to disallow peephole processing (currently
2164    used only within delayed branch sequence output).
2165 
2166    SEEN is used to track the end of the prologue, for emitting
2167    debug information.  We force the emission of a line note after
2168    both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG.  */
2169 
2170 static rtx_insn *
2171 final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2172 		   int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2173 {
2174 #if HAVE_cc0
2175   rtx set;
2176 #endif
2177   rtx_insn *next;
2178   rtx_jump_table_data *table;
2179 
2180   insn_counter++;
2181 
2182   /* Ignore deleted insns.  These can occur when we split insns (due to a
2183      template of "#") while not optimizing.  */
2184   if (insn->deleted ())
2185     return NEXT_INSN (insn);
2186 
2187   switch (GET_CODE (insn))
2188     {
2189     case NOTE:
2190       switch (NOTE_KIND (insn))
2191 	{
2192 	case NOTE_INSN_DELETED:
2193 	case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2194 	  break;
2195 
2196 	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2197 	  maybe_output_next_view (seen);
2198 
2199 	  output_function_exception_table (0);
2200 
2201 	  if (targetm.asm_out.unwind_emit)
2202 	    targetm.asm_out.unwind_emit (asm_out_file, insn);
2203 
2204 	  in_cold_section_p = !in_cold_section_p;
2205 
2206 	  if (in_cold_section_p)
2207 	    cold_function_name
2208 	      = clone_function_name (current_function_decl, "cold");
2209 
2210 	  if (dwarf2out_do_frame ())
2211 	    {
2212 	      dwarf2out_switch_text_section ();
2213 	      if (!dwarf2_debug_info_emitted_p (current_function_decl)
2214 		  && !DECL_IGNORED_P (current_function_decl))
2215 		debug_hooks->switch_text_section ();
2216 	    }
2217 	  else if (!DECL_IGNORED_P (current_function_decl))
2218 	    debug_hooks->switch_text_section ();
2219 
2220 	  switch_to_section (current_function_section ());
2221 	  targetm.asm_out.function_switched_text_sections (asm_out_file,
2222 							   current_function_decl,
2223 							   in_cold_section_p);
2224 	  /* Emit a label for the split cold section.  Form label name by
2225 	     suffixing "cold" to the original function's name.  */
2226 	  if (in_cold_section_p)
2227 	    {
2228 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2229 	      ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2230 					      IDENTIFIER_POINTER
2231 					          (cold_function_name),
2232 					      current_function_decl);
2233 #else
2234 	      ASM_OUTPUT_LABEL (asm_out_file,
2235 				IDENTIFIER_POINTER (cold_function_name));
2236 #endif
2237 	      if (dwarf2out_do_frame ()
2238 	          && cfun->fde->dw_fde_second_begin != NULL)
2239 		ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin);
2240 	    }
2241 	  break;
2242 
2243 	case NOTE_INSN_BASIC_BLOCK:
2244 	  if (need_profile_function)
2245 	    {
2246 	      profile_function (asm_out_file);
2247 	      need_profile_function = false;
2248 	    }
2249 
2250 	  if (targetm.asm_out.unwind_emit)
2251 	    targetm.asm_out.unwind_emit (asm_out_file, insn);
2252 
2253 	  bb_discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2254 	  break;
2255 
2256 	case NOTE_INSN_EH_REGION_BEG:
2257 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2258 				  NOTE_EH_HANDLER (insn));
2259 	  break;
2260 
2261 	case NOTE_INSN_EH_REGION_END:
2262 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2263 				  NOTE_EH_HANDLER (insn));
2264 	  break;
2265 
2266 	case NOTE_INSN_PROLOGUE_END:
2267 	  targetm.asm_out.function_end_prologue (file);
2268 	  profile_after_prologue (file);
2269 
2270 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2271 	    {
2272 	      *seen |= SEEN_EMITTED;
2273 	      force_source_line = true;
2274 	    }
2275 	  else
2276 	    *seen |= SEEN_NOTE;
2277 
2278 	  break;
2279 
2280 	case NOTE_INSN_EPILOGUE_BEG:
2281           if (!DECL_IGNORED_P (current_function_decl))
2282             (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2283 	  targetm.asm_out.function_begin_epilogue (file);
2284 	  break;
2285 
2286 	case NOTE_INSN_CFI:
2287 	  dwarf2out_emit_cfi (NOTE_CFI (insn));
2288 	  break;
2289 
2290 	case NOTE_INSN_CFI_LABEL:
2291 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2292 				  NOTE_LABEL_NUMBER (insn));
2293 	  break;
2294 
2295 	case NOTE_INSN_FUNCTION_BEG:
2296 	  if (need_profile_function)
2297 	    {
2298 	      profile_function (asm_out_file);
2299 	      need_profile_function = false;
2300 	    }
2301 
2302 	  app_disable ();
2303 	  if (!DECL_IGNORED_P (current_function_decl))
2304 	    debug_hooks->end_prologue (last_linenum, last_filename);
2305 
2306 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2307 	    {
2308 	      *seen |= SEEN_EMITTED;
2309 	      force_source_line = true;
2310 	    }
2311 	  else
2312 	    *seen |= SEEN_NOTE;
2313 
2314 	  break;
2315 
2316 	case NOTE_INSN_BLOCK_BEG:
2317 	  if (debug_info_level == DINFO_LEVEL_NORMAL
2318 	      || debug_info_level == DINFO_LEVEL_VERBOSE
2319 	      || write_symbols == DWARF2_DEBUG
2320 	      || write_symbols == VMS_AND_DWARF2_DEBUG
2321 	      || write_symbols == VMS_DEBUG)
2322 	    {
2323 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2324 
2325 	      app_disable ();
2326 	      ++block_depth;
2327 	      high_block_linenum = last_linenum;
2328 
2329 	      /* Output debugging info about the symbol-block beginning.  */
2330 	      if (!DECL_IGNORED_P (current_function_decl))
2331 		debug_hooks->begin_block (last_linenum, n);
2332 
2333 	      /* Mark this block as output.  */
2334 	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2335 	      BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
2336 	    }
2337 	  if (write_symbols == DBX_DEBUG)
2338 	    {
2339 	      location_t *locus_ptr
2340 		= block_nonartificial_location (NOTE_BLOCK (insn));
2341 
2342 	      if (locus_ptr != NULL)
2343 		{
2344 		  override_filename = LOCATION_FILE (*locus_ptr);
2345 		  override_linenum = LOCATION_LINE (*locus_ptr);
2346 		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
2347 		  override_discriminator = compute_discriminator (*locus_ptr);
2348 		}
2349 	    }
2350 	  break;
2351 
2352 	case NOTE_INSN_BLOCK_END:
2353 	  maybe_output_next_view (seen);
2354 
2355 	  if (debug_info_level == DINFO_LEVEL_NORMAL
2356 	      || debug_info_level == DINFO_LEVEL_VERBOSE
2357 	      || write_symbols == DWARF2_DEBUG
2358 	      || write_symbols == VMS_AND_DWARF2_DEBUG
2359 	      || write_symbols == VMS_DEBUG)
2360 	    {
2361 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2362 
2363 	      app_disable ();
2364 
2365 	      /* End of a symbol-block.  */
2366 	      --block_depth;
2367 	      gcc_assert (block_depth >= 0);
2368 
2369 	      if (!DECL_IGNORED_P (current_function_decl))
2370 		debug_hooks->end_block (high_block_linenum, n);
2371 	      gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
2372 			  == in_cold_section_p);
2373 	    }
2374 	  if (write_symbols == DBX_DEBUG)
2375 	    {
2376 	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2377 	      location_t *locus_ptr
2378 		= block_nonartificial_location (outer_block);
2379 
2380 	      if (locus_ptr != NULL)
2381 		{
2382 		  override_filename = LOCATION_FILE (*locus_ptr);
2383 		  override_linenum = LOCATION_LINE (*locus_ptr);
2384 		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
2385 		  override_discriminator = compute_discriminator (*locus_ptr);
2386 		}
2387 	      else
2388 		{
2389 		  override_filename = NULL;
2390 		  override_linenum = 0;
2391 		  override_columnnum = 0;
2392 		  override_discriminator = 0;
2393 		}
2394 	    }
2395 	  break;
2396 
2397 	case NOTE_INSN_DELETED_LABEL:
2398 	  /* Emit the label.  We may have deleted the CODE_LABEL because
2399 	     the label could be proved to be unreachable, though still
2400 	     referenced (in the form of having its address taken).  */
2401 	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2402 	  break;
2403 
2404 	case NOTE_INSN_DELETED_DEBUG_LABEL:
2405 	  /* Similarly, but need to use different namespace for it.  */
2406 	  if (CODE_LABEL_NUMBER (insn) != -1)
2407 	    ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2408 	  break;
2409 
2410 	case NOTE_INSN_VAR_LOCATION:
2411 	  if (!DECL_IGNORED_P (current_function_decl))
2412 	    {
2413 	      debug_hooks->var_location (insn);
2414 	      set_next_view_needed (seen);
2415 	    }
2416 	  break;
2417 
2418 	case NOTE_INSN_BEGIN_STMT:
2419 	  gcc_checking_assert (cfun->debug_nonbind_markers);
2420 	  if (!DECL_IGNORED_P (current_function_decl)
2421 	      && notice_source_line (insn, NULL))
2422 	    {
2423 	    output_source_line:
2424 	      (*debug_hooks->source_line) (last_linenum, last_columnnum,
2425 					   last_filename, last_discriminator,
2426 					   true);
2427 	      clear_next_view_needed (seen);
2428 	    }
2429 	  break;
2430 
2431 	case NOTE_INSN_INLINE_ENTRY:
2432 	  gcc_checking_assert (cfun->debug_nonbind_markers);
2433 	  if (!DECL_IGNORED_P (current_function_decl)
2434 	      && notice_source_line (insn, NULL))
2435 	    {
2436 	      (*debug_hooks->inline_entry) (LOCATION_BLOCK
2437 					    (NOTE_MARKER_LOCATION (insn)));
2438 	      goto output_source_line;
2439 	    }
2440 	  break;
2441 
2442 	default:
2443 	  gcc_unreachable ();
2444 	  break;
2445 	}
2446       break;
2447 
2448     case BARRIER:
2449       break;
2450 
2451     case CODE_LABEL:
2452       /* The target port might emit labels in the output function for
2453 	 some insn, e.g. sh.c output_branchy_insn.  */
2454       if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2455 	{
2456 	  align_flags alignment = LABEL_TO_ALIGNMENT (insn);
2457 	  if (alignment.levels[0].log && NEXT_INSN (insn))
2458 	    {
2459 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2460 	      /* Output both primary and secondary alignment.  */
2461 	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log,
2462 					 alignment.levels[0].maxskip);
2463 	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log,
2464 					 alignment.levels[1].maxskip);
2465 #else
2466 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2467               ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log);
2468 #else
2469 	      ASM_OUTPUT_ALIGN (file, alignment.levels[0].log);
2470 #endif
2471 #endif
2472 	    }
2473 	}
2474       CC_STATUS_INIT;
2475 
2476       if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2477 	debug_hooks->label (as_a <rtx_code_label *> (insn));
2478 
2479       app_disable ();
2480 
2481       /* If this label is followed by a jump-table, make sure we put
2482 	 the label in the read-only section.  Also possibly write the
2483 	 label and jump table together.  */
2484       table = jump_table_for_label (as_a <rtx_code_label *> (insn));
2485       if (table)
2486 	{
2487 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2488 	  /* In this case, the case vector is being moved by the
2489 	     target, so don't output the label at all.  Leave that
2490 	     to the back end macros.  */
2491 #else
2492 	  if (! JUMP_TABLES_IN_TEXT_SECTION)
2493 	    {
2494 	      int log_align;
2495 
2496 	      switch_to_section (targetm.asm_out.function_rodata_section
2497 				 (current_function_decl));
2498 
2499 #ifdef ADDR_VEC_ALIGN
2500 	      log_align = ADDR_VEC_ALIGN (table);
2501 #else
2502 	      log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2503 #endif
2504 	      ASM_OUTPUT_ALIGN (file, log_align);
2505 	    }
2506 	  else
2507 	    switch_to_section (current_function_section ());
2508 
2509 #ifdef ASM_OUTPUT_CASE_LABEL
2510 	  ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table);
2511 #else
2512 	  targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2513 #endif
2514 #endif
2515 	  break;
2516 	}
2517       if (LABEL_ALT_ENTRY_P (insn))
2518 	output_alternate_entry_point (file, insn);
2519       else
2520 	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2521       break;
2522 
2523     default:
2524       {
2525 	rtx body = PATTERN (insn);
2526 	int insn_code_number;
2527 	const char *templ;
2528 	bool is_stmt, *is_stmt_p;
2529 
2530 	if (MAY_HAVE_DEBUG_MARKER_INSNS && cfun->debug_nonbind_markers)
2531 	  {
2532 	    is_stmt = false;
2533 	    is_stmt_p = NULL;
2534 	  }
2535 	else
2536 	  is_stmt_p = &is_stmt;
2537 
2538 	/* Reset this early so it is correct for ASM statements.  */
2539 	current_insn_predicate = NULL_RTX;
2540 
2541 	/* An INSN, JUMP_INSN or CALL_INSN.
2542 	   First check for special kinds that recog doesn't recognize.  */
2543 
2544 	if (GET_CODE (body) == USE /* These are just declarations.  */
2545 	    || GET_CODE (body) == CLOBBER)
2546 	  break;
2547 
2548 #if HAVE_cc0
2549 	{
2550 	  /* If there is a REG_CC_SETTER note on this insn, it means that
2551 	     the setting of the condition code was done in the delay slot
2552 	     of the insn that branched here.  So recover the cc status
2553 	     from the insn that set it.  */
2554 
2555 	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2556 	  if (note)
2557 	    {
2558 	      rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2559 	      NOTICE_UPDATE_CC (PATTERN (other), other);
2560 	      cc_prev_status = cc_status;
2561 	    }
2562 	}
2563 #endif
2564 
2565 	/* Detect insns that are really jump-tables
2566 	   and output them as such.  */
2567 
2568         if (JUMP_TABLE_DATA_P (insn))
2569 	  {
2570 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2571 	    int vlen, idx;
2572 #endif
2573 
2574 	    if (! JUMP_TABLES_IN_TEXT_SECTION)
2575 	      switch_to_section (targetm.asm_out.function_rodata_section
2576 				 (current_function_decl));
2577 	    else
2578 	      switch_to_section (current_function_section ());
2579 
2580 	    app_disable ();
2581 
2582 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2583 	    if (GET_CODE (body) == ADDR_VEC)
2584 	      {
2585 #ifdef ASM_OUTPUT_ADDR_VEC
2586 		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2587 #else
2588 		gcc_unreachable ();
2589 #endif
2590 	      }
2591 	    else
2592 	      {
2593 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2594 		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2595 #else
2596 		gcc_unreachable ();
2597 #endif
2598 	      }
2599 #else
2600 	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2601 	    for (idx = 0; idx < vlen; idx++)
2602 	      {
2603 		if (GET_CODE (body) == ADDR_VEC)
2604 		  {
2605 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2606 		    ASM_OUTPUT_ADDR_VEC_ELT
2607 		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2608 #else
2609 		    gcc_unreachable ();
2610 #endif
2611 		  }
2612 		else
2613 		  {
2614 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2615 		    ASM_OUTPUT_ADDR_DIFF_ELT
2616 		      (file,
2617 		       body,
2618 		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2619 		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2620 #else
2621 		    gcc_unreachable ();
2622 #endif
2623 		  }
2624 	      }
2625 #ifdef ASM_OUTPUT_CASE_END
2626 	    ASM_OUTPUT_CASE_END (file,
2627 				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2628 				 insn);
2629 #endif
2630 #endif
2631 
2632 	    switch_to_section (current_function_section ());
2633 
2634 	    if (debug_variable_location_views
2635 		&& !DECL_IGNORED_P (current_function_decl))
2636 	      debug_hooks->var_location (insn);
2637 
2638 	    break;
2639 	  }
2640 	/* Output this line note if it is the first or the last line
2641 	   note in a row.  */
2642 	if (!DECL_IGNORED_P (current_function_decl)
2643 	    && notice_source_line (insn, is_stmt_p))
2644 	  {
2645 	    if (flag_verbose_asm)
2646 	      asm_show_source (last_filename, last_linenum);
2647 	    (*debug_hooks->source_line) (last_linenum, last_columnnum,
2648 					 last_filename, last_discriminator,
2649 					 is_stmt);
2650 	    clear_next_view_needed (seen);
2651 	  }
2652 	else
2653 	  maybe_output_next_view (seen);
2654 
2655 	gcc_checking_assert (!DEBUG_INSN_P (insn));
2656 
2657 	if (GET_CODE (body) == PARALLEL
2658 	    && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2659 	  body = XVECEXP (body, 0, 0);
2660 
2661 	if (GET_CODE (body) == ASM_INPUT)
2662 	  {
2663 	    const char *string = XSTR (body, 0);
2664 
2665 	    /* There's no telling what that did to the condition codes.  */
2666 	    CC_STATUS_INIT;
2667 
2668 	    if (string[0])
2669 	      {
2670 		expanded_location loc;
2671 
2672 		app_enable ();
2673 		loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2674 		if (*loc.file && loc.line)
2675 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2676 			   ASM_COMMENT_START, loc.line, loc.file);
2677 		fprintf (asm_out_file, "\t%s\n", string);
2678 #if HAVE_AS_LINE_ZERO
2679 		if (*loc.file && loc.line)
2680 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2681 #endif
2682 	      }
2683 	    break;
2684 	  }
2685 
2686 	/* Detect `asm' construct with operands.  */
2687 	if (asm_noperands (body) >= 0)
2688 	  {
2689 	    unsigned int noperands = asm_noperands (body);
2690 	    rtx *ops = XALLOCAVEC (rtx, noperands);
2691 	    const char *string;
2692 	    location_t loc;
2693 	    expanded_location expanded;
2694 
2695 	    /* There's no telling what that did to the condition codes.  */
2696 	    CC_STATUS_INIT;
2697 
2698 	    /* Get out the operand values.  */
2699 	    string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2700 	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2701 	    insn_noperands = noperands;
2702 	    this_is_asm_operands = insn;
2703 	    expanded = expand_location (loc);
2704 
2705 #ifdef FINAL_PRESCAN_INSN
2706 	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2707 #endif
2708 
2709 	    /* Output the insn using them.  */
2710 	    if (string[0])
2711 	      {
2712 		app_enable ();
2713 		if (expanded.file && expanded.line)
2714 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2715 			   ASM_COMMENT_START, expanded.line, expanded.file);
2716 	        output_asm_insn (string, ops);
2717 #if HAVE_AS_LINE_ZERO
2718 		if (expanded.file && expanded.line)
2719 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2720 #endif
2721 	      }
2722 
2723 	    if (targetm.asm_out.final_postscan_insn)
2724 	      targetm.asm_out.final_postscan_insn (file, insn, ops,
2725 						   insn_noperands);
2726 
2727 	    this_is_asm_operands = 0;
2728 	    break;
2729 	  }
2730 
2731 	app_disable ();
2732 
2733 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2734 	  {
2735 	    /* A delayed-branch sequence */
2736 	    int i;
2737 
2738 	    final_sequence = seq;
2739 
2740 	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2741 	       force the restoration of a comparison that was previously
2742 	       thought unnecessary.  If that happens, cancel this sequence
2743 	       and cause that insn to be restored.  */
2744 
2745 	    next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2746 	    if (next != seq->insn (1))
2747 	      {
2748 		final_sequence = 0;
2749 		return next;
2750 	      }
2751 
2752 	    for (i = 1; i < seq->len (); i++)
2753 	      {
2754 		rtx_insn *insn = seq->insn (i);
2755 		rtx_insn *next = NEXT_INSN (insn);
2756 		/* We loop in case any instruction in a delay slot gets
2757 		   split.  */
2758 		do
2759 		  insn = final_scan_insn (insn, file, 0, 1, seen);
2760 		while (insn != next);
2761 	      }
2762 #ifdef DBR_OUTPUT_SEQEND
2763 	    DBR_OUTPUT_SEQEND (file);
2764 #endif
2765 	    final_sequence = 0;
2766 
2767 	    /* If the insn requiring the delay slot was a CALL_INSN, the
2768 	       insns in the delay slot are actually executed before the
2769 	       called function.  Hence we don't preserve any CC-setting
2770 	       actions in these insns and the CC must be marked as being
2771 	       clobbered by the function.  */
2772 	    if (CALL_P (seq->insn (0)))
2773 	      {
2774 		CC_STATUS_INIT;
2775 	      }
2776 	    break;
2777 	  }
2778 
2779 	/* We have a real machine instruction as rtl.  */
2780 
2781 	body = PATTERN (insn);
2782 
2783 #if HAVE_cc0
2784 	set = single_set (insn);
2785 
2786 	/* Check for redundant test and compare instructions
2787 	   (when the condition codes are already set up as desired).
2788 	   This is done only when optimizing; if not optimizing,
2789 	   it should be possible for the user to alter a variable
2790 	   with the debugger in between statements
2791 	   and the next statement should reexamine the variable
2792 	   to compute the condition codes.  */
2793 
2794 	if (optimize_p)
2795 	  {
2796 	    if (set
2797 		&& GET_CODE (SET_DEST (set)) == CC0
2798 		&& insn != last_ignored_compare)
2799 	      {
2800 		rtx src1, src2;
2801 		if (GET_CODE (SET_SRC (set)) == SUBREG)
2802 		  SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2803 
2804 		src1 = SET_SRC (set);
2805 		src2 = NULL_RTX;
2806 		if (GET_CODE (SET_SRC (set)) == COMPARE)
2807 		  {
2808 		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2809 		      XEXP (SET_SRC (set), 0)
2810 			= alter_subreg (&XEXP (SET_SRC (set), 0), true);
2811 		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2812 		      XEXP (SET_SRC (set), 1)
2813 			= alter_subreg (&XEXP (SET_SRC (set), 1), true);
2814 		    if (XEXP (SET_SRC (set), 1)
2815 			== CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2816 		      src2 = XEXP (SET_SRC (set), 0);
2817 		  }
2818 		if ((cc_status.value1 != 0
2819 		     && rtx_equal_p (src1, cc_status.value1))
2820 		    || (cc_status.value2 != 0
2821 			&& rtx_equal_p (src1, cc_status.value2))
2822 		    || (src2 != 0 && cc_status.value1 != 0
2823 		        && rtx_equal_p (src2, cc_status.value1))
2824 		    || (src2 != 0 && cc_status.value2 != 0
2825 			&& rtx_equal_p (src2, cc_status.value2)))
2826 		  {
2827 		    /* Don't delete insn if it has an addressing side-effect.  */
2828 		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2829 			/* or if anything in it is volatile.  */
2830 			&& ! volatile_refs_p (PATTERN (insn)))
2831 		      {
2832 			/* We don't really delete the insn; just ignore it.  */
2833 			last_ignored_compare = insn;
2834 			break;
2835 		      }
2836 		  }
2837 	      }
2838 	  }
2839 
2840 	/* If this is a conditional branch, maybe modify it
2841 	   if the cc's are in a nonstandard state
2842 	   so that it accomplishes the same thing that it would
2843 	   do straightforwardly if the cc's were set up normally.  */
2844 
2845 	if (cc_status.flags != 0
2846 	    && JUMP_P (insn)
2847 	    && GET_CODE (body) == SET
2848 	    && SET_DEST (body) == pc_rtx
2849 	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2850 	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2851 	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2852 	  {
2853 	    /* This function may alter the contents of its argument
2854 	       and clear some of the cc_status.flags bits.
2855 	       It may also return 1 meaning condition now always true
2856 	       or -1 meaning condition now always false
2857 	       or 2 meaning condition nontrivial but altered.  */
2858 	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2859 	    /* If condition now has fixed value, replace the IF_THEN_ELSE
2860 	       with its then-operand or its else-operand.  */
2861 	    if (result == 1)
2862 	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
2863 	    if (result == -1)
2864 	      SET_SRC (body) = XEXP (SET_SRC (body), 2);
2865 
2866 	    /* The jump is now either unconditional or a no-op.
2867 	       If it has become a no-op, don't try to output it.
2868 	       (It would not be recognized.)  */
2869 	    if (SET_SRC (body) == pc_rtx)
2870 	      {
2871 	        delete_insn (insn);
2872 		break;
2873 	      }
2874 	    else if (ANY_RETURN_P (SET_SRC (body)))
2875 	      /* Replace (set (pc) (return)) with (return).  */
2876 	      PATTERN (insn) = body = SET_SRC (body);
2877 
2878 	    /* Rerecognize the instruction if it has changed.  */
2879 	    if (result != 0)
2880 	      INSN_CODE (insn) = -1;
2881 	  }
2882 
2883 	/* If this is a conditional trap, maybe modify it if the cc's
2884 	   are in a nonstandard state so that it accomplishes the same
2885 	   thing that it would do straightforwardly if the cc's were
2886 	   set up normally.  */
2887 	if (cc_status.flags != 0
2888 	    && NONJUMP_INSN_P (insn)
2889 	    && GET_CODE (body) == TRAP_IF
2890 	    && COMPARISON_P (TRAP_CONDITION (body))
2891 	    && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2892 	  {
2893 	    /* This function may alter the contents of its argument
2894 	       and clear some of the cc_status.flags bits.
2895 	       It may also return 1 meaning condition now always true
2896 	       or -1 meaning condition now always false
2897 	       or 2 meaning condition nontrivial but altered.  */
2898 	    int result = alter_cond (TRAP_CONDITION (body));
2899 
2900 	    /* If TRAP_CONDITION has become always false, delete the
2901 	       instruction.  */
2902 	    if (result == -1)
2903 	      {
2904 		delete_insn (insn);
2905 		break;
2906 	      }
2907 
2908 	    /* If TRAP_CONDITION has become always true, replace
2909 	       TRAP_CONDITION with const_true_rtx.  */
2910 	    if (result == 1)
2911 	      TRAP_CONDITION (body) = const_true_rtx;
2912 
2913 	    /* Rerecognize the instruction if it has changed.  */
2914 	    if (result != 0)
2915 	      INSN_CODE (insn) = -1;
2916 	  }
2917 
2918 	/* Make same adjustments to instructions that examine the
2919 	   condition codes without jumping and instructions that
2920 	   handle conditional moves (if this machine has either one).  */
2921 
2922 	if (cc_status.flags != 0
2923 	    && set != 0)
2924 	  {
2925 	    rtx cond_rtx, then_rtx, else_rtx;
2926 
2927 	    if (!JUMP_P (insn)
2928 		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2929 	      {
2930 		cond_rtx = XEXP (SET_SRC (set), 0);
2931 		then_rtx = XEXP (SET_SRC (set), 1);
2932 		else_rtx = XEXP (SET_SRC (set), 2);
2933 	      }
2934 	    else
2935 	      {
2936 		cond_rtx = SET_SRC (set);
2937 		then_rtx = const_true_rtx;
2938 		else_rtx = const0_rtx;
2939 	      }
2940 
2941 	    if (COMPARISON_P (cond_rtx)
2942 		&& XEXP (cond_rtx, 0) == cc0_rtx)
2943 	      {
2944 		int result;
2945 		result = alter_cond (cond_rtx);
2946 		if (result == 1)
2947 		  validate_change (insn, &SET_SRC (set), then_rtx, 0);
2948 		else if (result == -1)
2949 		  validate_change (insn, &SET_SRC (set), else_rtx, 0);
2950 		else if (result == 2)
2951 		  INSN_CODE (insn) = -1;
2952 		if (SET_DEST (set) == SET_SRC (set))
2953 		  delete_insn (insn);
2954 	      }
2955 	  }
2956 
2957 #endif
2958 
2959 	/* Do machine-specific peephole optimizations if desired.  */
2960 
2961 	if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2962 	  {
2963 	    rtx_insn *next = peephole (insn);
2964 	    /* When peepholing, if there were notes within the peephole,
2965 	       emit them before the peephole.  */
2966 	    if (next != 0 && next != NEXT_INSN (insn))
2967 	      {
2968 		rtx_insn *note, *prev = PREV_INSN (insn);
2969 
2970 		for (note = NEXT_INSN (insn); note != next;
2971 		     note = NEXT_INSN (note))
2972 		  final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2973 
2974 		/* Put the notes in the proper position for a later
2975 		   rescan.  For example, the SH target can do this
2976 		   when generating a far jump in a delayed branch
2977 		   sequence.  */
2978 		note = NEXT_INSN (insn);
2979 		SET_PREV_INSN (note) = prev;
2980 		SET_NEXT_INSN (prev) = note;
2981 		SET_NEXT_INSN (PREV_INSN (next)) = insn;
2982 		SET_PREV_INSN (insn) = PREV_INSN (next);
2983 		SET_NEXT_INSN (insn) = next;
2984 		SET_PREV_INSN (next) = insn;
2985 	      }
2986 
2987 	    /* PEEPHOLE might have changed this.  */
2988 	    body = PATTERN (insn);
2989 	  }
2990 
2991 	/* Try to recognize the instruction.
2992 	   If successful, verify that the operands satisfy the
2993 	   constraints for the instruction.  Crash if they don't,
2994 	   since `reload' should have changed them so that they do.  */
2995 
2996 	insn_code_number = recog_memoized (insn);
2997 	cleanup_subreg_operands (insn);
2998 
2999 	/* Dump the insn in the assembly for debugging (-dAP).
3000 	   If the final dump is requested as slim RTL, dump slim
3001 	   RTL to the assembly file also.  */
3002 	if (flag_dump_rtl_in_asm)
3003 	  {
3004 	    print_rtx_head = ASM_COMMENT_START;
3005 	    if (! (dump_flags & TDF_SLIM))
3006 	      print_rtl_single (asm_out_file, insn);
3007 	    else
3008 	      dump_insn_slim (asm_out_file, insn);
3009 	    print_rtx_head = "";
3010 	  }
3011 
3012 	if (! constrain_operands_cached (insn, 1))
3013 	  fatal_insn_not_found (insn);
3014 
3015 	/* Some target machines need to prescan each insn before
3016 	   it is output.  */
3017 
3018 #ifdef FINAL_PRESCAN_INSN
3019 	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
3020 #endif
3021 
3022 	if (targetm.have_conditional_execution ()
3023 	    && GET_CODE (PATTERN (insn)) == COND_EXEC)
3024 	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
3025 
3026 #if HAVE_cc0
3027 	cc_prev_status = cc_status;
3028 
3029 	/* Update `cc_status' for this instruction.
3030 	   The instruction's output routine may change it further.
3031 	   If the output routine for a jump insn needs to depend
3032 	   on the cc status, it should look at cc_prev_status.  */
3033 
3034 	NOTICE_UPDATE_CC (body, insn);
3035 #endif
3036 
3037 	current_output_insn = debug_insn = insn;
3038 
3039 	/* Find the proper template for this insn.  */
3040 	templ = get_insn_template (insn_code_number, insn);
3041 
3042 	/* If the C code returns 0, it means that it is a jump insn
3043 	   which follows a deleted test insn, and that test insn
3044 	   needs to be reinserted.  */
3045 	if (templ == 0)
3046 	  {
3047 	    rtx_insn *prev;
3048 
3049 	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
3050 
3051 	    /* We have already processed the notes between the setter and
3052 	       the user.  Make sure we don't process them again, this is
3053 	       particularly important if one of the notes is a block
3054 	       scope note or an EH note.  */
3055 	    for (prev = insn;
3056 		 prev != last_ignored_compare;
3057 		 prev = PREV_INSN (prev))
3058 	      {
3059 		if (NOTE_P (prev))
3060 		  delete_insn (prev);	/* Use delete_note.  */
3061 	      }
3062 
3063 	    return prev;
3064 	  }
3065 
3066 	/* If the template is the string "#", it means that this insn must
3067 	   be split.  */
3068 	if (templ[0] == '#' && templ[1] == '\0')
3069 	  {
3070 	    rtx_insn *new_rtx = try_split (body, insn, 0);
3071 
3072 	    /* If we didn't split the insn, go away.  */
3073 	    if (new_rtx == insn && PATTERN (new_rtx) == body)
3074 	      fatal_insn ("could not split insn", insn);
3075 
3076 	    /* If we have a length attribute, this instruction should have
3077 	       been split in shorten_branches, to ensure that we would have
3078 	       valid length info for the splitees.  */
3079 	    gcc_assert (!HAVE_ATTR_length);
3080 
3081 	    return new_rtx;
3082 	  }
3083 
3084 	/* ??? This will put the directives in the wrong place if
3085 	   get_insn_template outputs assembly directly.  However calling it
3086 	   before get_insn_template breaks if the insn is split.  */
3087 	if (targetm.asm_out.unwind_emit_before_insn
3088 	    && targetm.asm_out.unwind_emit)
3089 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
3090 
3091 	rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
3092 	if (call_insn != NULL)
3093 	  {
3094 	    rtx x = call_from_call_insn (call_insn);
3095 	    x = XEXP (x, 0);
3096 	    if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3097 	      {
3098 		tree t;
3099 		x = XEXP (x, 0);
3100 		t = SYMBOL_REF_DECL (x);
3101 		if (t)
3102 		  assemble_external (t);
3103 	      }
3104 	  }
3105 
3106 	/* Output assembler code from the template.  */
3107 	output_asm_insn (templ, recog_data.operand);
3108 
3109 	/* Some target machines need to postscan each insn after
3110 	   it is output.  */
3111 	if (targetm.asm_out.final_postscan_insn)
3112 	  targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3113 					       recog_data.n_operands);
3114 
3115 	if (!targetm.asm_out.unwind_emit_before_insn
3116 	    && targetm.asm_out.unwind_emit)
3117 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
3118 
3119 	/* Let the debug info back-end know about this call.  We do this only
3120 	   after the instruction has been emitted because labels that may be
3121 	   created to reference the call instruction must appear after it.  */
3122 	if ((debug_variable_location_views || call_insn != NULL)
3123 	    && !DECL_IGNORED_P (current_function_decl))
3124 	  debug_hooks->var_location (insn);
3125 
3126 	current_output_insn = debug_insn = 0;
3127       }
3128     }
3129   return NEXT_INSN (insn);
3130 }
3131 
3132 /* This is a wrapper around final_scan_insn_1 that allows ports to
3133    call it recursively without a known value for SEEN.  The value is
3134    saved at the outermost call, and recovered for recursive calls.
3135    Recursive calls MUST pass NULL, or the same pointer if they can
3136    otherwise get to it.  */
3137 
3138 rtx_insn *
3139 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p,
3140 		 int nopeepholes, int *seen)
3141 {
3142   static int *enclosing_seen;
3143   static int recursion_counter;
3144 
3145   gcc_assert (seen || recursion_counter);
3146   gcc_assert (!recursion_counter || !seen || seen == enclosing_seen);
3147 
3148   if (!recursion_counter++)
3149     enclosing_seen = seen;
3150   else if (!seen)
3151     seen = enclosing_seen;
3152 
3153   rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen);
3154 
3155   if (!--recursion_counter)
3156     enclosing_seen = NULL;
3157 
3158   return ret;
3159 }
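
/* Two hedged examples of the contract above.  The outermost caller in
   this file passes a real SEEN pointer:

       insn = final_scan_insn (insn, file, optimize_p, 0, &seen);

   whereas a port that re-enters final_scan_insn from its output code
   (e.g. to emit a delay-slot insn by hand) passes NULL and inherits
   the enclosing SEEN:

       final_scan_insn (inner_insn, asm_out_file, optimize, 1, NULL);

   Passing a different non-null pointer from a nested call would trip
   the assertions above.  */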
3160 
3161 
3162 
3163 /* Map DECLs to instance discriminators.  This is allocated and
3164    defined in ada/gcc-interface/trans.c, when compiling with -gnateS.
3165    Mappings from this table are saved and restored for LTO, so
3166    link-time compilation will have this map set, at least in
3167    partitions containing at least one DECL with an associated instance
3168    discriminator.  */
3169 
3170 decl_to_instance_map_t *decl_to_instance_map;
3171 
3172 /* Return the instance number assigned to DECL.  */
3173 
3174 static inline int
3175 map_decl_to_instance (const_tree decl)
3176 {
3177   int *inst;
3178 
3179   if (!decl_to_instance_map || !decl || !DECL_P (decl))
3180     return 0;
3181 
3182   inst = decl_to_instance_map->get (decl);
3183 
3184   if (!inst)
3185     return 0;
3186 
3187   return *inst;
3188 }
3189 
3190 /* Set DISCRIMINATOR to the appropriate value, possibly derived from LOC.  */
3191 
3192 static inline int
3193 compute_discriminator (location_t loc)
3194 {
3195   int discriminator;
3196 
3197   if (!decl_to_instance_map)
3198     discriminator = bb_discriminator;
3199   else
3200     {
3201       tree block = LOCATION_BLOCK (loc);
3202 
3203       while (block && TREE_CODE (block) == BLOCK
3204 	     && !inlined_function_outer_scope_p (block))
3205 	block = BLOCK_SUPERCONTEXT (block);
3206 
3207       tree decl;
3208 
3209       if (!block)
3210 	decl = current_function_decl;
3211       else if (DECL_P (block))
3212 	decl = block;
3213       else
3214 	decl = block_ultimate_origin (block);
3215 
3216       discriminator = map_decl_to_instance (decl);
3217     }
3218 
3219   return discriminator;
3220 }
3221 
3222 /* Return whether a source line note needs to be emitted before INSN.
3223    Sets IS_STMT to TRUE if the line should be marked as a possible
3224    breakpoint location.  */
3225 
3226 static bool
3227 notice_source_line (rtx_insn *insn, bool *is_stmt)
3228 {
3229   const char *filename;
3230   int linenum, columnnum;
3231 
3232   if (NOTE_MARKER_P (insn))
3233     {
3234       location_t loc = NOTE_MARKER_LOCATION (insn);
3235       expanded_location xloc = expand_location (loc);
3236       if (xloc.line == 0)
3237 	{
3238 	  gcc_checking_assert (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION
3239 			       || LOCATION_LOCUS (loc) == BUILTINS_LOCATION);
3240 	  return false;
3241 	}
3242       filename = xloc.file;
3243       linenum = xloc.line;
3244       columnnum = xloc.column;
3245       discriminator = compute_discriminator (loc);
3246       force_source_line = true;
3247     }
3248   else if (override_filename)
3249     {
3250       filename = override_filename;
3251       linenum = override_linenum;
3252       columnnum = override_columnnum;
3253       discriminator = override_discriminator;
3254     }
3255   else if (INSN_HAS_LOCATION (insn))
3256     {
3257       expanded_location xloc = insn_location (insn);
3258       filename = xloc.file;
3259       linenum = xloc.line;
3260       columnnum = xloc.column;
3261       discriminator = compute_discriminator (INSN_LOCATION (insn));
3262     }
3263   else
3264     {
3265       filename = NULL;
3266       linenum = 0;
3267       columnnum = 0;
3268       discriminator = 0;
3269     }
3270 
3271   if (filename == NULL)
3272     return false;
3273 
3274   if (force_source_line
3275       || filename != last_filename
3276       || last_linenum != linenum
3277       || (debug_column_info && last_columnnum != columnnum))
3278     {
3279       force_source_line = false;
3280       last_filename = filename;
3281       last_linenum = linenum;
3282       last_columnnum = columnnum;
3283       last_discriminator = discriminator;
3284       if (is_stmt)
3285 	*is_stmt = true;
3286       high_block_linenum = MAX (last_linenum, high_block_linenum);
3287       high_function_linenum = MAX (last_linenum, high_function_linenum);
3288       return true;
3289     }
3290 
3291   if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3292     {
3293       /* If the discriminator changed, but the line number did not,
3294          output the line table entry with is_stmt false so the
3295          debugger does not treat this as a breakpoint location.  */
3296       last_discriminator = discriminator;
3297       if (is_stmt)
3298 	*is_stmt = false;
3299       return true;
3300     }
3301 
3302   return false;
3303 }
3304 
3305 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3306    directly to the desired hard register.  */
3307 
3308 void
3309 cleanup_subreg_operands (rtx_insn *insn)
3310 {
3311   int i;
3312   bool changed = false;
3313   extract_insn_cached (insn);
3314   for (i = 0; i < recog_data.n_operands; i++)
3315     {
3316       /* The following test cannot use recog_data.operand when testing
3317 	 for a SUBREG: the underlying object might have been changed
3318 	 already if we are inside a match_operator expression that
3319 	 matches the else clause.  Instead we test the underlying
3320 	 expression directly.  */
3321       if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3322 	{
3323 	  recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3324 	  changed = true;
3325 	}
3326       else if (GET_CODE (recog_data.operand[i]) == PLUS
3327 	       || GET_CODE (recog_data.operand[i]) == MULT
3328 	       || MEM_P (recog_data.operand[i]))
3329 	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3330     }
3331 
3332   for (i = 0; i < recog_data.n_dups; i++)
3333     {
3334       if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3335 	{
3336 	  *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3337 	  changed = true;
3338 	}
3339       else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3340 	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
3341 	       || MEM_P (*recog_data.dup_loc[i]))
3342 	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3343     }
3344   if (changed)
3345     df_insn_rescan (insn);
3346 }
3347 
3348 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3349    the thing it is a subreg of.  Do it anyway if FINAL_P.  */
3350 
3351 rtx
3352 alter_subreg (rtx *xp, bool final_p)
3353 {
3354   rtx x = *xp;
3355   rtx y = SUBREG_REG (x);
3356 
3357   /* simplify_subreg does not remove subreg from volatile references.
3358      We are required to.  */
3359   if (MEM_P (y))
3360     {
3361       poly_int64 offset = SUBREG_BYTE (x);
3362 
3363       /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3364 	 contains 0 instead of the proper offset.  See simplify_subreg.  */
3365       if (paradoxical_subreg_p (x))
3366 	offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3367 
3368       if (final_p)
3369 	*xp = adjust_address (y, GET_MODE (x), offset);
3370       else
3371 	*xp = adjust_address_nv (y, GET_MODE (x), offset);
3372     }
3373   else if (REG_P (y) && HARD_REGISTER_P (y))
3374     {
3375       rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3376 				     SUBREG_BYTE (x));
3377 
3378       if (new_rtx != 0)
3379 	*xp = new_rtx;
3380       else if (final_p && REG_P (y))
3381 	{
3382 	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
3383 	  unsigned int regno;
3384 	  poly_int64 offset;
3385 
3386 	  regno = subreg_regno (x);
3387 	  if (subreg_lowpart_p (x))
3388 	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3389 	  else
3390 	    offset = SUBREG_BYTE (x);
3391 	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3392 	}
3393     }
3394 
3395   return *xp;
3396 }
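
/* Illustrative sketch only (results depend on the target's endianness and
   register layout): for the MEM case above, something like
   (subreg:SI (mem:DI (reg:P sp)) 4) is folded by adjust_address into
   (mem:SI (plus:P (reg:P sp) (const_int 4))), and for the hard register
   case, (subreg:SI (reg:DI 0) 0) is replaced by the hard register
   (reg:SI 0) that simplify_subreg selects.  */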
3397 
3398 /* Do alter_subreg on all the SUBREGs contained in X.  */
3399 
3400 static rtx
3401 walk_alter_subreg (rtx *xp, bool *changed)
3402 {
3403   rtx x = *xp;
3404   switch (GET_CODE (x))
3405     {
3406     case PLUS:
3407     case MULT:
3408     case AND:
3409       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3410       XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3411       break;
3412 
3413     case MEM:
3414     case ZERO_EXTEND:
3415       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3416       break;
3417 
3418     case SUBREG:
3419       *changed = true;
3420       return alter_subreg (xp, true);
3421 
3422     default:
3423       break;
3424     }
3425 
3426   return *xp;
3427 }
3428 
3429 #if HAVE_cc0
3430 
3431 /* Given BODY, the body of a jump instruction, alter the jump condition
3432    as required by the bits that are set in cc_status.flags.
3433    Not all of the bits there can be handled at this level in all cases.
3434 
3435    The value is normally 0.
3436    1 means that the condition has become always true.
3437    -1 means that the condition has become always false.
3438    2 means that COND has been altered.  */
3439 
3440 static int
3441 alter_cond (rtx cond)
3442 {
3443   int value = 0;
3444 
3445   if (cc_status.flags & CC_REVERSED)
3446     {
3447       value = 2;
3448       PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3449     }
3450 
3451   if (cc_status.flags & CC_INVERTED)
3452     {
3453       value = 2;
3454       PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3455     }
3456 
3457   if (cc_status.flags & CC_NOT_POSITIVE)
3458     switch (GET_CODE (cond))
3459       {
3460       case LE:
3461       case LEU:
3462       case GEU:
3463 	/* Jump becomes unconditional.  */
3464 	return 1;
3465 
3466       case GT:
3467       case GTU:
3468       case LTU:
3469 	/* Jump becomes no-op.  */
3470 	return -1;
3471 
3472       case GE:
3473 	PUT_CODE (cond, EQ);
3474 	value = 2;
3475 	break;
3476 
3477       case LT:
3478 	PUT_CODE (cond, NE);
3479 	value = 2;
3480 	break;
3481 
3482       default:
3483 	break;
3484       }
3485 
3486   if (cc_status.flags & CC_NOT_NEGATIVE)
3487     switch (GET_CODE (cond))
3488       {
3489       case GE:
3490       case GEU:
3491 	/* Jump becomes unconditional.  */
3492 	return 1;
3493 
3494       case LT:
3495       case LTU:
3496 	/* Jump becomes no-op.  */
3497 	return -1;
3498 
3499       case LE:
3500       case LEU:
3501 	PUT_CODE (cond, EQ);
3502 	value = 2;
3503 	break;
3504 
3505       case GT:
3506       case GTU:
3507 	PUT_CODE (cond, NE);
3508 	value = 2;
3509 	break;
3510 
3511       default:
3512 	break;
3513       }
3514 
3515   if (cc_status.flags & CC_NO_OVERFLOW)
3516     switch (GET_CODE (cond))
3517       {
3518       case GEU:
3519 	/* Jump becomes unconditional.  */
3520 	return 1;
3521 
3522       case LEU:
3523 	PUT_CODE (cond, EQ);
3524 	value = 2;
3525 	break;
3526 
3527       case GTU:
3528 	PUT_CODE (cond, NE);
3529 	value = 2;
3530 	break;
3531 
3532       case LTU:
3533 	/* Jump becomes no-op.  */
3534 	return -1;
3535 
3536       default:
3537 	break;
3538       }
3539 
3540   if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3541     switch (GET_CODE (cond))
3542       {
3543       default:
3544 	gcc_unreachable ();
3545 
3546       case NE:
3547 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3548 	value = 2;
3549 	break;
3550 
3551       case EQ:
3552 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3553 	value = 2;
3554 	break;
3555       }
3556 
3557   if (cc_status.flags & CC_NOT_SIGNED)
3558     /* The flags are valid if signed condition operators are converted
3559        to unsigned.  */
3560     switch (GET_CODE (cond))
3561       {
3562       case LE:
3563 	PUT_CODE (cond, LEU);
3564 	value = 2;
3565 	break;
3566 
3567       case LT:
3568 	PUT_CODE (cond, LTU);
3569 	value = 2;
3570 	break;
3571 
3572       case GT:
3573 	PUT_CODE (cond, GTU);
3574 	value = 2;
3575 	break;
3576 
3577       case GE:
3578 	PUT_CODE (cond, GEU);
3579 	value = 2;
3580 	break;
3581 
3582       default:
3583 	break;
3584       }
3585 
3586   return value;
3587 }
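
/* For illustration (cc0 targets only): when CC_NOT_NEGATIVE is set, a
   following (lt ...) or (ltu ...) condition can never hold, so alter_cond
   returns -1 and the jump becomes a no-op, while (ge ...) or (geu ...)
   always holds and the function returns 1, mirroring the switch above.  */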
3588 #endif
3589 
3590 /* Report inconsistency between the assembler template and the operands.
3591    In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
3592 
3593 void
3594 output_operand_lossage (const char *cmsgid, ...)
3595 {
3596   char *fmt_string;
3597   char *new_message;
3598   const char *pfx_str;
3599   va_list ap;
3600 
3601   va_start (ap, cmsgid);
3602 
3603   pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3604   fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3605   new_message = xvasprintf (fmt_string, ap);
3606 
3607   if (this_is_asm_operands)
3608     error_for_asm (this_is_asm_operands, "%s", new_message);
3609   else
3610     internal_error ("%s", new_message);
3611 
3612   free (fmt_string);
3613   free (new_message);
3614   va_end (ap);
3615 }
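
/* A minimal usage sketch (hypothetical target code, not taken from any real
   back end): a TARGET_PRINT_OPERAND implementation typically reports an
   unsupported modifier letter through output_operand_lossage like this.  */
#if 0
static void
example_print_operand (FILE *file, rtx x, int code)
{
  /* Reject a made-up 'Z' modifier unless the operand is a CONST_INT.  */
  if (code == 'Z' && !CONST_INT_P (x))
    {
      output_operand_lossage ("invalid operand for '%%Z' modifier");
      return;
    }
  fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
}
#endif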
3616 
3617 /* Output of assembler code from a template, and its subroutines.  */
3618 
3619 /* Annotate the assembly with a comment describing the pattern and
3620    alternative used.  */
3621 
3622 static void
3623 output_asm_name (void)
3624 {
3625   if (debug_insn)
3626     {
3627       fprintf (asm_out_file, "\t%s %d\t",
3628 	       ASM_COMMENT_START, INSN_UID (debug_insn));
3629 
3630       fprintf (asm_out_file, "[c=%d",
3631 	       insn_cost (debug_insn, optimize_insn_for_speed_p ()));
3632       if (HAVE_ATTR_length)
3633 	fprintf (asm_out_file, " l=%d",
3634 		 get_attr_length (debug_insn));
3635       fprintf (asm_out_file, "]  ");
3636 
3637       int num = INSN_CODE (debug_insn);
3638       fprintf (asm_out_file, "%s", insn_data[num].name);
3639       if (insn_data[num].n_alternatives > 1)
3640 	fprintf (asm_out_file, "/%d", which_alternative);
3641 
3642       /* Clear this so only the first assembler insn
3643 	 of any rtl insn will get the special comment for -dp.  */
3644       debug_insn = 0;
3645     }
3646 }
3647 
3648 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3649    or its address, return that expr.  Set *PADDRESSP to 1 if the expr
3650    corresponds to the address of the object and 0 if to the object.  */
3651 
3652 static tree
3653 get_mem_expr_from_op (rtx op, int *paddressp)
3654 {
3655   tree expr;
3656   int inner_addressp;
3657 
3658   *paddressp = 0;
3659 
3660   if (REG_P (op))
3661     return REG_EXPR (op);
3662   else if (!MEM_P (op))
3663     return 0;
3664 
3665   if (MEM_EXPR (op) != 0)
3666     return MEM_EXPR (op);
3667 
3668   /* Otherwise we have an address, so indicate it and look at the address.  */
3669   *paddressp = 1;
3670   op = XEXP (op, 0);
3671 
3672   /* First check if we have a decl for the address, then look at the right side
3673      if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
3674      But don't allow the address itself to be indirect.  */
3675   if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3676     return expr;
3677   else if (GET_CODE (op) == PLUS
3678 	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3679     return expr;
3680 
3681   while (UNARY_P (op)
3682 	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3683     op = XEXP (op, 0);
3684 
3685   expr = get_mem_expr_from_op (op, &inner_addressp);
3686   return inner_addressp ? 0 : expr;
3687 }
3688 
3689 /* Output operand names for assembler instructions.  OPERANDS is the
3690    operand vector, OPORDER is the order to write the operands, and NOPS
3691    is the number of operands to write.  */
3692 
3693 static void
3694 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3695 {
3696   int wrote = 0;
3697   int i;
3698 
3699   for (i = 0; i < nops; i++)
3700     {
3701       int addressp;
3702       rtx op = operands[oporder[i]];
3703       tree expr = get_mem_expr_from_op (op, &addressp);
3704 
3705       fprintf (asm_out_file, "%c%s",
3706 	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3707       wrote = 1;
3708       if (expr)
3709 	{
3710 	  fprintf (asm_out_file, "%s",
3711 		   addressp ? "*" : "");
3712 	  print_mem_expr (asm_out_file, expr);
3713 	  wrote = 1;
3714 	}
3715       else if (REG_P (op) && ORIGINAL_REGNO (op)
3716 	       && ORIGINAL_REGNO (op) != REGNO (op))
3717 	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3718     }
3719 }
3720 
3721 #ifdef ASSEMBLER_DIALECT
3722 /* Helper function to parse assembler dialects in the asm string.
3723    This is called from output_asm_insn and asm_fprintf.  */
3724 static const char *
3725 do_assembler_dialects (const char *p, int *dialect)
3726 {
3727   char c = *(p - 1);
3728 
3729   switch (c)
3730     {
3731     case '{':
3732       {
3733         int i;
3734 
3735         if (*dialect)
3736           output_operand_lossage ("nested assembly dialect alternatives");
3737         else
3738           *dialect = 1;
3739 
3740         /* If we want the first dialect, do nothing.  Otherwise, skip
3741            DIALECT_NUMBER of strings ending with '|'.  */
3742         for (i = 0; i < dialect_number; i++)
3743           {
3744             while (*p && *p != '}')
3745 	      {
3746 		if (*p == '|')
3747 		  {
3748 		    p++;
3749 		    break;
3750 		  }
3751 
3752 		/* Skip over any character after a percent sign.  */
3753 		if (*p == '%')
3754 		  p++;
3755 		if (*p)
3756 		  p++;
3757 	      }
3758 
3759             if (*p == '}')
3760 	      break;
3761           }
3762 
3763         if (*p == '\0')
3764           output_operand_lossage ("unterminated assembly dialect alternative");
3765       }
3766       break;
3767 
3768     case '|':
3769       if (*dialect)
3770         {
3771           /* Skip to close brace.  */
3772           do
3773             {
3774 	      if (*p == '\0')
3775 		{
3776 		  output_operand_lossage ("unterminated assembly dialect alternative");
3777 		  break;
3778 		}
3779 
3780 	      /* Skip over any character after a percent sign.  */
3781 	      if (*p == '%' && p[1])
3782 		{
3783 		  p += 2;
3784 		  continue;
3785 		}
3786 
3787 	      if (*p++ == '}')
3788 		break;
3789             }
3790           while (1);
3791 
3792           *dialect = 0;
3793         }
3794       else
3795         putc (c, asm_out_file);
3796       break;
3797 
3798     case '}':
3799       if (! *dialect)
3800         putc (c, asm_out_file);
3801       *dialect = 0;
3802       break;
3803     default:
3804       gcc_unreachable ();
3805     }
3806 
3807   return p;
3808 }
3809 #endif
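
/* For illustration: with ASSEMBLER_DIALECT defined, a template such as
   "mov{l}\t{%1, %0|%0, %1}" contains dialect alternatives separated by '|'
   inside '{...}'; the code above selects the alternative whose index equals
   dialect_number and copies only that text to the output.  */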
3810 
3811 /* Output text from TEMPLATE to the assembler output file,
3812    obeying %-directions to substitute operands taken from
3813    the vector OPERANDS.
3814 
3815    %N (for N a digit) means print operand N in usual manner.
3816    %lN means require operand N to be a CODE_LABEL or LABEL_REF
3817       and print the label name with no punctuation.
3818    %cN means require operand N to be a constant
3819       and print the constant expression with no punctuation.
3820    %aN means expect operand N to be a memory address
3821       (not a memory reference!) and print a reference
3822       to that address.
3823    %nN means expect operand N to be a constant
3824       and print a constant expression for minus the value
3825       of the operand, with no other punctuation.  */
3826 
3827 void
3828 output_asm_insn (const char *templ, rtx *operands)
3829 {
3830   const char *p;
3831   int c;
3832 #ifdef ASSEMBLER_DIALECT
3833   int dialect = 0;
3834 #endif
3835   int oporder[MAX_RECOG_OPERANDS];
3836   char opoutput[MAX_RECOG_OPERANDS];
3837   int ops = 0;
3838 
3839   /* An insn may return a null string template
3840      in a case where no assembler code is needed.  */
3841   if (*templ == 0)
3842     return;
3843 
3844   memset (opoutput, 0, sizeof opoutput);
3845   p = templ;
3846   putc ('\t', asm_out_file);
3847 
3848 #ifdef ASM_OUTPUT_OPCODE
3849   ASM_OUTPUT_OPCODE (asm_out_file, p);
3850 #endif
3851 
3852   while ((c = *p++))
3853     switch (c)
3854       {
3855       case '\n':
3856 	if (flag_verbose_asm)
3857 	  output_asm_operand_names (operands, oporder, ops);
3858 	if (flag_print_asm_name)
3859 	  output_asm_name ();
3860 
3861 	ops = 0;
3862 	memset (opoutput, 0, sizeof opoutput);
3863 
3864 	putc (c, asm_out_file);
3865 #ifdef ASM_OUTPUT_OPCODE
3866 	while ((c = *p) == '\t')
3867 	  {
3868 	    putc (c, asm_out_file);
3869 	    p++;
3870 	  }
3871 	ASM_OUTPUT_OPCODE (asm_out_file, p);
3872 #endif
3873 	break;
3874 
3875 #ifdef ASSEMBLER_DIALECT
3876       case '{':
3877       case '}':
3878       case '|':
3879 	p = do_assembler_dialects (p, &dialect);
3880 	break;
3881 #endif
3882 
3883       case '%':
3884 	/* %% outputs a single %.  %{, %} and %| print {, } and | respectively
3885 	   if ASSEMBLER_DIALECT is defined and these characters have a special
3886 	   meaning as dialect delimiters.  */
3887 	if (*p == '%'
3888 #ifdef ASSEMBLER_DIALECT
3889 	    || *p == '{' || *p == '}' || *p == '|'
3890 #endif
3891 	    )
3892 	  {
3893 	    putc (*p, asm_out_file);
3894 	    p++;
3895 	  }
3896 	/* %= outputs a number which is unique to each insn in the entire
3897 	   compilation.  This is useful for making local labels that are
3898 	   referred to more than once in a given insn.  */
3899 	else if (*p == '=')
3900 	  {
3901 	    p++;
3902 	    fprintf (asm_out_file, "%d", insn_counter);
3903 	  }
3904 	/* % followed by a letter and some digits
3905 	   outputs an operand in a special way depending on the letter.
3906 	   Letters `acln' are implemented directly.
3907 	   Other letters are passed to `output_operand' so that
3908 	   the TARGET_PRINT_OPERAND hook can define them.  */
3909 	else if (ISALPHA (*p))
3910 	  {
3911 	    int letter = *p++;
3912 	    unsigned long opnum;
3913 	    char *endptr;
3914 
3915 	    opnum = strtoul (p, &endptr, 10);
3916 
3917 	    if (endptr == p)
3918 	      output_operand_lossage ("operand number missing "
3919 				      "after %%-letter");
3920 	    else if (this_is_asm_operands && opnum >= insn_noperands)
3921 	      output_operand_lossage ("operand number out of range");
3922 	    else if (letter == 'l')
3923 	      output_asm_label (operands[opnum]);
3924 	    else if (letter == 'a')
3925 	      output_address (VOIDmode, operands[opnum]);
3926 	    else if (letter == 'c')
3927 	      {
3928 		if (CONSTANT_ADDRESS_P (operands[opnum]))
3929 		  output_addr_const (asm_out_file, operands[opnum]);
3930 		else
3931 		  output_operand (operands[opnum], 'c');
3932 	      }
3933 	    else if (letter == 'n')
3934 	      {
3935 		if (CONST_INT_P (operands[opnum]))
3936 		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3937 			   - INTVAL (operands[opnum]));
3938 		else
3939 		  {
3940 		    putc ('-', asm_out_file);
3941 		    output_addr_const (asm_out_file, operands[opnum]);
3942 		  }
3943 	      }
3944 	    else
3945 	      output_operand (operands[opnum], letter);
3946 
3947 	    if (!opoutput[opnum])
3948 	      oporder[ops++] = opnum;
3949 	    opoutput[opnum] = 1;
3950 
3951 	    p = endptr;
3952 	    c = *p;
3953 	  }
3954 	/* % followed by a digit outputs an operand the default way.  */
3955 	else if (ISDIGIT (*p))
3956 	  {
3957 	    unsigned long opnum;
3958 	    char *endptr;
3959 
3960 	    opnum = strtoul (p, &endptr, 10);
3961 	    if (this_is_asm_operands && opnum >= insn_noperands)
3962 	      output_operand_lossage ("operand number out of range");
3963 	    else
3964 	      output_operand (operands[opnum], 0);
3965 
3966 	    if (!opoutput[opnum])
3967 	      oporder[ops++] = opnum;
3968 	    opoutput[opnum] = 1;
3969 
3970 	    p = endptr;
3971 	    c = *p;
3972 	  }
3973 	/* % followed by punctuation: output something for that
3974 	   punctuation character alone, with no operand.  The
3975 	   TARGET_PRINT_OPERAND hook decides what is actually done.  */
3976 	else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3977 	  output_operand (NULL_RTX, *p++);
3978 	else
3979 	  output_operand_lossage ("invalid %%-code");
3980 	break;
3981 
3982       default:
3983 	putc (c, asm_out_file);
3984       }
3985 
3986   /* Try to keep the asm a bit more readable.  */
3987   if ((flag_verbose_asm || flag_print_asm_name) && strlen (templ) < 9)
3988     putc ('\t', asm_out_file);
3989 
3990   /* Write out the variable names for operands, if we know them.  */
3991   if (flag_verbose_asm)
3992     output_asm_operand_names (operands, oporder, ops);
3993   if (flag_print_asm_name)
3994     output_asm_name ();
3995 
3996   putc ('\n', asm_out_file);
3997 }
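
/* A usage sketch (illustrative template and operands, not taken from any
   real machine description): a pattern's output routine hands output_asm_insn
   a template plus an operand vector; %0 and %1 are substituted through
   TARGET_PRINT_OPERAND and the rest of the template is copied verbatim.  */
#if 0
static void
example_output_template (rtx *operands)
{
  /* Prints operand 0 and operand 1 with the target's operand printer.  */
  output_asm_insn ("add\t%0, %1", operands);
}
#endif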
3998 
3999 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
4000 
4001 void
4002 output_asm_label (rtx x)
4003 {
4004   char buf[256];
4005 
4006   if (GET_CODE (x) == LABEL_REF)
4007     x = label_ref_label (x);
4008   if (LABEL_P (x)
4009       || (NOTE_P (x)
4010 	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
4011     ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
4012   else
4013     output_operand_lossage ("'%%l' operand isn't a label");
4014 
4015   assemble_name (asm_out_file, buf);
4016 }
4017 
4018 /* Mark SYMBOL_REFs in X as referenced via assemble_external.  */
4019 
4020 void
4021 mark_symbol_refs_as_used (rtx x)
4022 {
4023   subrtx_iterator::array_type array;
4024   FOR_EACH_SUBRTX (iter, array, x, ALL)
4025     {
4026       const_rtx x = *iter;
4027       if (GET_CODE (x) == SYMBOL_REF)
4028 	if (tree t = SYMBOL_REF_DECL (x))
4029 	  assemble_external (t);
4030     }
4031 }
4032 
4033 /* Print operand X using machine-dependent assembler syntax.
4034    CODE is a non-digit that preceded the operand-number in the % spec,
4035    such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
4036    between the % and the digits.
4037    When CODE is a non-letter, X is 0.
4038 
4039    The meanings of the letters are machine-dependent and controlled
4040    by TARGET_PRINT_OPERAND.  */
4041 
4042 void
4043 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
4044 {
4045   if (x && GET_CODE (x) == SUBREG)
4046     x = alter_subreg (&x, true);
4047 
4048   /* X must not be a pseudo reg.  */
4049   if (!targetm.no_register_allocation)
4050     gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
4051 
4052   targetm.asm_out.print_operand (asm_out_file, x, code);
4053 
4054   if (x == NULL_RTX)
4055     return;
4056 
4057   mark_symbol_refs_as_used (x);
4058 }
4059 
4060 /* Print a memory reference operand for address X using
4061    machine-dependent assembler syntax.  */
4062 
4063 void
4064 output_address (machine_mode mode, rtx x)
4065 {
4066   bool changed = false;
4067   walk_alter_subreg (&x, &changed);
4068   targetm.asm_out.print_operand_address (asm_out_file, mode, x);
4069 }
4070 
4071 /* Print an integer constant expression in assembler syntax.
4072    Addition and subtraction are the only arithmetic
4073    that may appear in these expressions.  */
4074 
4075 void
4076 output_addr_const (FILE *file, rtx x)
4077 {
4078   char buf[256];
4079 
4080  restart:
4081   switch (GET_CODE (x))
4082     {
4083     case PC:
4084       putc ('.', file);
4085       break;
4086 
4087     case SYMBOL_REF:
4088       if (SYMBOL_REF_DECL (x))
4089 	assemble_external (SYMBOL_REF_DECL (x));
4090 #ifdef ASM_OUTPUT_SYMBOL_REF
4091       ASM_OUTPUT_SYMBOL_REF (file, x);
4092 #else
4093       assemble_name (file, XSTR (x, 0));
4094 #endif
4095       break;
4096 
4097     case LABEL_REF:
4098       x = label_ref_label (x);
4099       /* Fall through.  */
4100     case CODE_LABEL:
4101       ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
4102 #ifdef ASM_OUTPUT_LABEL_REF
4103       ASM_OUTPUT_LABEL_REF (file, buf);
4104 #else
4105       assemble_name (file, buf);
4106 #endif
4107       break;
4108 
4109     case CONST_INT:
4110       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4111       break;
4112 
4113     case CONST:
4114       /* This used to output parentheses around the expression,
4115 	 but that does not work on the 386 (either ATT or BSD assembler).  */
4116       output_addr_const (file, XEXP (x, 0));
4117       break;
4118 
4119     case CONST_WIDE_INT:
4120       /* We do not know the mode here, so we have to use a roundabout
4121 	 way to build a wide-int to get it printed properly.  */
4122       {
4123 	wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
4124 					   CONST_WIDE_INT_NUNITS (x),
4125 					   CONST_WIDE_INT_NUNITS (x)
4126 					   * HOST_BITS_PER_WIDE_INT,
4127 					   false);
4128 	print_decs (w, file);
4129       }
4130       break;
4131 
4132     case CONST_DOUBLE:
4133       if (CONST_DOUBLE_AS_INT_P (x))
4134 	{
4135 	  /* We can use %d if the number is one word and positive.  */
4136 	  if (CONST_DOUBLE_HIGH (x))
4137 	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
4138 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
4139 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
4140 	  else if (CONST_DOUBLE_LOW (x) < 0)
4141 	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
4142 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
4143 	  else
4144 	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
4145 	}
4146       else
4147 	/* We can't handle floating point constants;
4148 	   PRINT_OPERAND must handle them.  */
4149 	output_operand_lossage ("floating constant misused");
4150       break;
4151 
4152     case CONST_FIXED:
4153       fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
4154       break;
4155 
4156     case PLUS:
4157       /* Some assemblers need integer constants to appear last (e.g. masm).  */
4158       if (CONST_INT_P (XEXP (x, 0)))
4159 	{
4160 	  output_addr_const (file, XEXP (x, 1));
4161 	  if (INTVAL (XEXP (x, 0)) >= 0)
4162 	    fprintf (file, "+");
4163 	  output_addr_const (file, XEXP (x, 0));
4164 	}
4165       else
4166 	{
4167 	  output_addr_const (file, XEXP (x, 0));
4168 	  if (!CONST_INT_P (XEXP (x, 1))
4169 	      || INTVAL (XEXP (x, 1)) >= 0)
4170 	    fprintf (file, "+");
4171 	  output_addr_const (file, XEXP (x, 1));
4172 	}
4173       break;
4174 
4175     case MINUS:
4176       /* Avoid outputting things like x-x or x+5-x,
4177 	 since some assemblers can't handle that.  */
4178       x = simplify_subtraction (x);
4179       if (GET_CODE (x) != MINUS)
4180 	goto restart;
4181 
4182       output_addr_const (file, XEXP (x, 0));
4183       fprintf (file, "-");
4184       if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
4185 	  || GET_CODE (XEXP (x, 1)) == PC
4186 	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
4187 	output_addr_const (file, XEXP (x, 1));
4188       else
4189 	{
4190 	  fputs (targetm.asm_out.open_paren, file);
4191 	  output_addr_const (file, XEXP (x, 1));
4192 	  fputs (targetm.asm_out.close_paren, file);
4193 	}
4194       break;
4195 
4196     case ZERO_EXTEND:
4197     case SIGN_EXTEND:
4198     case SUBREG:
4199     case TRUNCATE:
4200       output_addr_const (file, XEXP (x, 0));
4201       break;
4202 
4203     default:
4204       if (targetm.asm_out.output_addr_const_extra (file, x))
4205 	break;
4206 
4207       output_operand_lossage ("invalid expression as operand");
4208     }
4209 }
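
/* A usage sketch (hypothetical symbol name and offset, illustration only):
   printing the address constant "x+4".  */
#if 0
static void
example_output_addr_const (FILE *f)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "x");
  /* plus_constant folds the offset; output_addr_const then emits "x+4".  */
  output_addr_const (f, plus_constant (Pmode, sym, 4));
}
#endif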
4210 
4211 /* Output a quoted string.  */
4212 
4213 void
4214 output_quoted_string (FILE *asm_file, const char *string)
4215 {
4216 #ifdef OUTPUT_QUOTED_STRING
4217   OUTPUT_QUOTED_STRING (asm_file, string);
4218 #else
4219   char c;
4220 
4221   putc ('\"', asm_file);
4222   while ((c = *string++) != 0)
4223     {
4224       if (ISPRINT (c))
4225 	{
4226 	  if (c == '\"' || c == '\\')
4227 	    putc ('\\', asm_file);
4228 	  putc (c, asm_file);
4229 	}
4230       else
4231 	fprintf (asm_file, "\\%03o", (unsigned char) c);
4232     }
4233   putc ('\"', asm_file);
4234 #endif
4235 }
4236 
4237 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4238 
4239 void
4240 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4241 {
4242   char buf[2 + CHAR_BIT * sizeof (value) / 4];
4243   if (value == 0)
4244     putc ('0', f);
4245   else
4246     {
4247       char *p = buf + sizeof (buf);
4248       do
4249         *--p = "0123456789abcdef"[value % 16];
4250       while ((value /= 16) != 0);
4251       *--p = 'x';
4252       *--p = '0';
4253       fwrite (p, 1, buf + sizeof (buf) - p, f);
4254     }
4255 }
4256 
4257 /* Internal function that prints an unsigned long in decimal in reverse.
4258    The output string IS NOT null-terminated. */
4259 
4260 static int
4261 sprint_ul_rev (char *s, unsigned long value)
4262 {
4263   int i = 0;
4264   do
4265     {
4266       s[i] = "0123456789"[value % 10];
4267       value /= 10;
4268       i++;
4269       /* alternate version, without modulo */
4270       /* oldval = value; */
4271       /* value /= 10; */
4272       /* s[i] = "0123456789" [oldval - 10*value]; */
4273       /* i++ */
4274     }
4275   while (value != 0);
4276   return i;
4277 }
4278 
4279 /* Write an unsigned long as decimal to a file, fast. */
4280 
4281 void
4282 fprint_ul (FILE *f, unsigned long value)
4283 {
4284   /* python says: len(str(2**64)) == 20 */
4285   char s[20];
4286   int i;
4287 
4288   i = sprint_ul_rev (s, value);
4289 
4290   /* It's probably too small to bother with string reversal and fputs. */
4291   do
4292     {
4293       i--;
4294       putc (s[i], f);
4295     }
4296   while (i != 0);
4297 }
4298 
4299 /* Write an unsigned long as decimal to a string, fast.
4300    s must be wide enough to not overflow, at least 21 chars.
4301    Returns the length of the string (without terminating '\0'). */
4302 
4303 int
4304 sprint_ul (char *s, unsigned long value)
4305 {
4306   int len = sprint_ul_rev (s, value);
4307   s[len] = '\0';
4308 
4309   std::reverse (s, s + len);
4310   return len;
4311 }
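
/* A small usage sketch for the fast printing helpers above (hypothetical
   values; illustration only).  */
#if 0
static void
example_fast_printing (FILE *f)
{
  char buf[21];
  fprint_whex (f, 0x1234);		     /* writes "0x1234" */
  fprint_ul (f, 42UL);			     /* writes "42" */
  int len = sprint_ul (buf, 4294967295UL);   /* buf = "4294967295", len = 10 */
  fwrite (buf, 1, len, f);
}
#endif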
4312 
4313 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4314    %R prints the value of REGISTER_PREFIX.
4315    %L prints the value of LOCAL_LABEL_PREFIX.
4316    %U prints the value of USER_LABEL_PREFIX.
4317    %I prints the value of IMMEDIATE_PREFIX.
4318    %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4319    Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4320 
4321    We handle alternate assembler dialects here, just like output_asm_insn.  */
4322 
4323 void
4324 asm_fprintf (FILE *file, const char *p, ...)
4325 {
4326   char buf[10];
4327   char *q, c;
4328 #ifdef ASSEMBLER_DIALECT
4329   int dialect = 0;
4330 #endif
4331   va_list argptr;
4332 
4333   va_start (argptr, p);
4334 
4335   buf[0] = '%';
4336 
4337   while ((c = *p++))
4338     switch (c)
4339       {
4340 #ifdef ASSEMBLER_DIALECT
4341       case '{':
4342       case '}':
4343       case '|':
4344 	p = do_assembler_dialects (p, &dialect);
4345 	break;
4346 #endif
4347 
4348       case '%':
4349 	c = *p++;
4350 	q = &buf[1];
4351 	while (strchr ("-+ #0", c))
4352 	  {
4353 	    *q++ = c;
4354 	    c = *p++;
4355 	  }
4356 	while (ISDIGIT (c) || c == '.')
4357 	  {
4358 	    *q++ = c;
4359 	    c = *p++;
4360 	  }
4361 	switch (c)
4362 	  {
4363 	  case '%':
4364 	    putc ('%', file);
4365 	    break;
4366 
4367 	  case 'd':  case 'i':  case 'u':
4368 	  case 'x':  case 'X':  case 'o':
4369 	  case 'c':
4370 	    *q++ = c;
4371 	    *q = 0;
4372 	    fprintf (file, buf, va_arg (argptr, int));
4373 	    break;
4374 
4375 	  case 'w':
4376 	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4377 	       'o' cases, but we do not check for those cases.  It
4378 	       means that the value is a HOST_WIDE_INT, which may be
4379 	       either `long' or `long long'.  */
4380 	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4381 	    q += strlen (HOST_WIDE_INT_PRINT);
4382 	    *q++ = *p++;
4383 	    *q = 0;
4384 	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4385 	    break;
4386 
4387 	  case 'l':
4388 	    *q++ = c;
4389 #ifdef HAVE_LONG_LONG
4390 	    if (*p == 'l')
4391 	      {
4392 		*q++ = *p++;
4393 		*q++ = *p++;
4394 		*q = 0;
4395 		fprintf (file, buf, va_arg (argptr, long long));
4396 	      }
4397 	    else
4398 #endif
4399 	      {
4400 		*q++ = *p++;
4401 		*q = 0;
4402 		fprintf (file, buf, va_arg (argptr, long));
4403 	      }
4404 
4405 	    break;
4406 
4407 	  case 's':
4408 	    *q++ = c;
4409 	    *q = 0;
4410 	    fprintf (file, buf, va_arg (argptr, char *));
4411 	    break;
4412 
4413 	  case 'O':
4414 #ifdef ASM_OUTPUT_OPCODE
4415 	    ASM_OUTPUT_OPCODE (asm_out_file, p);
4416 #endif
4417 	    break;
4418 
4419 	  case 'R':
4420 #ifdef REGISTER_PREFIX
4421 	    fprintf (file, "%s", REGISTER_PREFIX);
4422 #endif
4423 	    break;
4424 
4425 	  case 'I':
4426 #ifdef IMMEDIATE_PREFIX
4427 	    fprintf (file, "%s", IMMEDIATE_PREFIX);
4428 #endif
4429 	    break;
4430 
4431 	  case 'L':
4432 #ifdef LOCAL_LABEL_PREFIX
4433 	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4434 #endif
4435 	    break;
4436 
4437 	  case 'U':
4438 	    fputs (user_label_prefix, file);
4439 	    break;
4440 
4441 #ifdef ASM_FPRINTF_EXTENSIONS
4442 	    /* Uppercase letters are reserved for general use by asm_fprintf
4443 	       and so are not available to target specific code.  In order to
4444 	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4445 	       they are defined here.  As they get turned into real extensions
4446 	       to asm_fprintf they should be removed from this list.  */
4447 	  case 'A': case 'B': case 'C': case 'D': case 'E':
4448 	  case 'F': case 'G': case 'H': case 'J': case 'K':
4449 	  case 'M': case 'N': case 'P': case 'Q': case 'S':
4450 	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
4451 	    break;
4452 
4453 	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4454 #endif
4455 	  default:
4456 	    gcc_unreachable ();
4457 	  }
4458 	break;
4459 
4460       default:
4461 	putc (c, file);
4462       }
4463   va_end (argptr);
4464 }
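
/* A usage sketch (hypothetical directive string and values): %U expands to
   user_label_prefix, %wd prints a HOST_WIDE_INT in decimal, and %R would
   insert REGISTER_PREFIX on targets that define it.  */
#if 0
static void
example_asm_fprintf (FILE *file, const char *name, HOST_WIDE_INT size)
{
  asm_fprintf (file, "\t.comm\t%U%s, %wd\n", name, size);
}
#endif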
4465 
4466 /* Return nonzero if this function has no function calls.  */
4467 
4468 int
4469 leaf_function_p (void)
4470 {
4471   rtx_insn *insn;
4472 
4473   /* Ensure we walk the entire function body.  */
4474   gcc_assert (!in_sequence_p ());
4475 
4476   /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4477      functions even if they call mcount.  */
4478   if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4479     return 0;
4480 
4481   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4482     {
4483       if (CALL_P (insn)
4484 	  && ! SIBLING_CALL_P (insn))
4485 	return 0;
4486       if (NONJUMP_INSN_P (insn)
4487 	  && GET_CODE (PATTERN (insn)) == SEQUENCE
4488 	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4489 	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4490 	return 0;
4491     }
4492 
4493   return 1;
4494 }
4495 
4496 /* Return 1 if the branch is a forward branch.
4497    Uses the insn_shuid array, so it works only in the final pass.  May be used
4498    by output templates to add customary branch prediction hints.  */
4500 int
4501 final_forward_branch_p (rtx_insn *insn)
4502 {
4503   int insn_id, label_id;
4504 
4505   gcc_assert (uid_shuid);
4506   insn_id = INSN_SHUID (insn);
4507   label_id = INSN_SHUID (JUMP_LABEL (insn));
4508   /* We've hit some insns that do not have id information available.  */
4509   gcc_assert (insn_id && label_id);
4510   return insn_id < label_id;
4511 }
4512 
4513 /* On some machines, a function with no call insns
4514    can run faster if it doesn't create its own register window.
4515    When output, the leaf function should use only the "output"
4516    registers.  Ordinarily, the function would be compiled to use
4517    the "input" registers to find its arguments; it is a candidate
4518    for leaf treatment if it uses only the "input" registers.
4519    Leaf function treatment means renumbering so the function
4520    uses the "output" registers instead.  */
4521 
4522 #ifdef LEAF_REGISTERS
4523 
4524 /* Return 1 if this function uses only the registers that can be
4525    safely renumbered.  */
4526 
4527 int
4528 only_leaf_regs_used (void)
4529 {
4530   int i;
4531   const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4532 
4533   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4534     if ((df_regs_ever_live_p (i) || global_regs[i])
4535 	&& ! permitted_reg_in_leaf_functions[i])
4536       return 0;
4537 
4538   if (crtl->uses_pic_offset_table
4539       && pic_offset_table_rtx != 0
4540       && REG_P (pic_offset_table_rtx)
4541       && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4542     return 0;
4543 
4544   return 1;
4545 }
4546 
4547 /* Scan all instructions and renumber all registers into those
4548    available in leaf functions.  */
4549 
4550 static void
4551 leaf_renumber_regs (rtx_insn *first)
4552 {
4553   rtx_insn *insn;
4554 
4555   /* Renumber only the actual patterns.
4556      The reg-notes can contain frame pointer refs,
4557      and renumbering them could crash, and should not be needed.  */
4558   for (insn = first; insn; insn = NEXT_INSN (insn))
4559     if (INSN_P (insn))
4560       leaf_renumber_regs_insn (PATTERN (insn));
4561 }
4562 
4563 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4564    available in leaf functions.  */
4565 
4566 void
4567 leaf_renumber_regs_insn (rtx in_rtx)
4568 {
4569   int i, j;
4570   const char *format_ptr;
4571 
4572   if (in_rtx == 0)
4573     return;
4574 
4575   /* Renumber all input-registers into output-registers.
4576      renumbered_regs would be 1 for an output-register.  */
4578 
4579   if (REG_P (in_rtx))
4580     {
4581       int newreg;
4582 
4583       /* Don't renumber the same reg twice.  */
4584       if (in_rtx->used)
4585 	return;
4586 
4587       newreg = REGNO (in_rtx);
4588       /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
4589 	 to reach here as part of a REG_NOTE.  */
4590       if (newreg >= FIRST_PSEUDO_REGISTER)
4591 	{
4592 	  in_rtx->used = 1;
4593 	  return;
4594 	}
4595       newreg = LEAF_REG_REMAP (newreg);
4596       gcc_assert (newreg >= 0);
4597       df_set_regs_ever_live (REGNO (in_rtx), false);
4598       df_set_regs_ever_live (newreg, true);
4599       SET_REGNO (in_rtx, newreg);
4600       in_rtx->used = 1;
4601       return;
4602     }
4603 
4604   if (INSN_P (in_rtx))
4605     {
4606       /* Inside a SEQUENCE, we find insns.
4607 	 Renumber just the patterns of these insns,
4608 	 just as we do for the top-level insns.  */
4609       leaf_renumber_regs_insn (PATTERN (in_rtx));
4610       return;
4611     }
4612 
4613   format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4614 
4615   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4616     switch (*format_ptr++)
4617       {
4618       case 'e':
4619 	leaf_renumber_regs_insn (XEXP (in_rtx, i));
4620 	break;
4621 
4622       case 'E':
4623 	if (XVEC (in_rtx, i) != NULL)
4624 	  for (j = 0; j < XVECLEN (in_rtx, i); j++)
4625 	    leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4626 	break;
4627 
4628       case 'S':
4629       case 's':
4630       case '0':
4631       case 'i':
4632       case 'w':
4633       case 'p':
4634       case 'n':
4635       case 'u':
4636 	break;
4637 
4638       default:
4639 	gcc_unreachable ();
4640       }
4641 }
4642 #endif
4643 
4644 /* Turn the RTL into assembly.  */
4645 static unsigned int
4646 rest_of_handle_final (void)
4647 {
4648   const char *fnname = get_fnname_from_decl (current_function_decl);
4649 
4650   /* Turn debug markers into notes if the var-tracking pass has not
4651      been invoked.  */
4652   if (!flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNS)
4653     delete_vta_debug_insns (false);
4654 
4655   assemble_start_function (current_function_decl, fnname);
4656   rtx_insn *first = get_insns ();
4657   int seen = 0;
4658   final_start_function_1 (&first, asm_out_file, &seen, optimize);
4659   final_1 (first, asm_out_file, seen, optimize);
4660   if (flag_ipa_ra
4661       && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl))
4662       /* Functions with the naked attribute are supported only with basic asm
4663 	 statements in the body; thus, for supported use cases, the information
4664 	 on clobbered registers is not available.  */
4665       && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)))
4666     collect_fn_hard_reg_usage ();
4667   final_end_function ();
4668 
4669   /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4670      directive that closes the procedure descriptor.  Similarly, for x64 SEH.
4671      Otherwise it's not strictly necessary, but it doesn't hurt either.  */
4672   output_function_exception_table (crtl->has_bb_partition ? 1 : 0);
4673 
4674   assemble_end_function (current_function_decl, fnname);
4675 
4676   /* Free up reg info memory.  */
4677   free_reg_info ();
4678 
4679   if (! quiet_flag)
4680     fflush (asm_out_file);
4681 
4682   /* Write DBX symbols if requested.  */
4683 
4684   /* Note that for those inline functions where we don't initially
4685      know for certain that we will be generating an out-of-line copy,
4686      the first invocation of this routine (rest_of_compilation) will
4687      skip over this code by doing a `goto exit_rest_of_compilation;'.
4688      Later on, wrapup_global_declarations will (indirectly) call
4689      rest_of_compilation again for those inline functions that need
4690      to have out-of-line copies generated.  During that call, we
4691      *will* be routed past here.  */
4692 
4693   timevar_push (TV_SYMOUT);
4694   if (!DECL_IGNORED_P (current_function_decl))
4695     debug_hooks->function_decl (current_function_decl);
4696   timevar_pop (TV_SYMOUT);
4697 
4698   /* Release the blocks that are linked to DECL_INITIAL() to free the memory.  */
4699   DECL_INITIAL (current_function_decl) = error_mark_node;
4700 
4701   if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4702       && targetm.have_ctors_dtors)
4703     targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4704 				 decl_init_priority_lookup
4705 				   (current_function_decl));
4706   if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4707       && targetm.have_ctors_dtors)
4708     targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4709 				decl_fini_priority_lookup
4710 				  (current_function_decl));
4711   return 0;
4712 }
4713 
4714 namespace {
4715 
4716 const pass_data pass_data_final =
4717 {
4718   RTL_PASS, /* type */
4719   "final", /* name */
4720   OPTGROUP_NONE, /* optinfo_flags */
4721   TV_FINAL, /* tv_id */
4722   0, /* properties_required */
4723   0, /* properties_provided */
4724   0, /* properties_destroyed */
4725   0, /* todo_flags_start */
4726   0, /* todo_flags_finish */
4727 };
4728 
4729 class pass_final : public rtl_opt_pass
4730 {
4731 public:
4732   pass_final (gcc::context *ctxt)
4733     : rtl_opt_pass (pass_data_final, ctxt)
4734   {}
4735 
4736   /* opt_pass methods: */
4737   virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4738 
4739 }; // class pass_final
4740 
4741 } // anon namespace
4742 
4743 rtl_opt_pass *
4744 make_pass_final (gcc::context *ctxt)
4745 {
4746   return new pass_final (ctxt);
4747 }
4748 
4749 
4750 static unsigned int
4751 rest_of_handle_shorten_branches (void)
4752 {
4753   /* Shorten branches.  */
4754   shorten_branches (get_insns ());
4755   return 0;
4756 }
4757 
4758 namespace {
4759 
4760 const pass_data pass_data_shorten_branches =
4761 {
4762   RTL_PASS, /* type */
4763   "shorten", /* name */
4764   OPTGROUP_NONE, /* optinfo_flags */
4765   TV_SHORTEN_BRANCH, /* tv_id */
4766   0, /* properties_required */
4767   0, /* properties_provided */
4768   0, /* properties_destroyed */
4769   0, /* todo_flags_start */
4770   0, /* todo_flags_finish */
4771 };
4772 
4773 class pass_shorten_branches : public rtl_opt_pass
4774 {
4775 public:
4776   pass_shorten_branches (gcc::context *ctxt)
4777     : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4778   {}
4779 
4780   /* opt_pass methods: */
4781   virtual unsigned int execute (function *)
4782     {
4783       return rest_of_handle_shorten_branches ();
4784     }
4785 
4786 }; // class pass_shorten_branches
4787 
4788 } // anon namespace
4789 
4790 rtl_opt_pass *
4791 make_pass_shorten_branches (gcc::context *ctxt)
4792 {
4793   return new pass_shorten_branches (ctxt);
4794 }
4795 
4796 
4797 static unsigned int
4798 rest_of_clean_state (void)
4799 {
4800   rtx_insn *insn, *next;
4801   FILE *final_output = NULL;
4802   int save_unnumbered = flag_dump_unnumbered;
4803   int save_noaddr = flag_dump_noaddr;
4804 
4805   if (flag_dump_final_insns)
4806     {
4807       final_output = fopen (flag_dump_final_insns, "a");
4808       if (!final_output)
4809 	{
4810 	  error ("could not open final insn dump file %qs: %m",
4811 		 flag_dump_final_insns);
4812 	  flag_dump_final_insns = NULL;
4813 	}
4814       else
4815 	{
4816 	  flag_dump_noaddr = flag_dump_unnumbered = 1;
4817 	  if (flag_compare_debug_opt || flag_compare_debug)
4818 	    dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG;
4819 	  dump_function_header (final_output, current_function_decl,
4820 				dump_flags);
4821 	  final_insns_dump_p = true;
4822 
4823 	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4824 	    if (LABEL_P (insn))
4825 	      INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4826 	    else
4827 	      {
4828 		if (NOTE_P (insn))
4829 		  set_block_for_insn (insn, NULL);
4830 		INSN_UID (insn) = 0;
4831 	      }
4832 	}
4833     }
4834 
4835   /* It is very important to decompose the RTL instruction chain here:
4836      debug information keeps pointing into CODE_LABEL insns inside the function
4837      body.  If these remain pointing to the other insns, we end up preserving
4838      the whole RTL chain and its attached detailed debug info in memory.  */
4839   for (insn = get_insns (); insn; insn = next)
4840     {
4841       next = NEXT_INSN (insn);
4842       SET_NEXT_INSN (insn) = NULL;
4843       SET_PREV_INSN (insn) = NULL;
4844 
4845       rtx_insn *call_insn = insn;
4846       if (NONJUMP_INSN_P (call_insn)
4847 	  && GET_CODE (PATTERN (call_insn)) == SEQUENCE)
4848 	{
4849 	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
4850 	  call_insn = seq->insn (0);
4851 	}
4852       if (CALL_P (call_insn))
4853 	{
4854 	  rtx note
4855 	    = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
4856 	  if (note)
4857 	    remove_note (call_insn, note);
4858 	}
4859 
4860       if (final_output
4861 	  && (!NOTE_P (insn)
4862 	      || (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4863 		  && NOTE_KIND (insn) != NOTE_INSN_BEGIN_STMT
4864 		  && NOTE_KIND (insn) != NOTE_INSN_INLINE_ENTRY
4865 		  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4866 		  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4867 		  && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4868 	print_rtl_single (final_output, insn);
4869     }
4870 
4871   if (final_output)
4872     {
4873       flag_dump_noaddr = save_noaddr;
4874       flag_dump_unnumbered = save_unnumbered;
4875       final_insns_dump_p = false;
4876 
4877       if (fclose (final_output))
4878 	{
4879 	  error ("could not close final insn dump file %qs: %m",
4880 		 flag_dump_final_insns);
4881 	  flag_dump_final_insns = NULL;
4882 	}
4883     }
4884 
4885   flag_rerun_cse_after_global_opts = 0;
4886   reload_completed = 0;
4887   epilogue_completed = 0;
4888 #ifdef STACK_REGS
4889   regstack_completed = 0;
4890 #endif
4891 
4892   /* Clear out the insn_length contents now that they are no
4893      longer valid.  */
4894   init_insn_lengths ();
4895 
4896   /* Show no temporary slots allocated.  */
4897   init_temp_slots ();
4898 
4899   free_bb_for_insn ();
4900 
4901   if (cfun->gimple_df)
4902     delete_tree_ssa (cfun);
4903 
4904   /* We can reduce the stack alignment at a call site only when we are sure
4905      that the function body just produced will actually be used in the final
4906      executable.  */
4907   if (flag_ipa_stack_alignment
4908       && decl_binds_to_current_def_p (current_function_decl))
4909     {
4910       unsigned int pref = crtl->preferred_stack_boundary;
4911       if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4912         pref = crtl->stack_alignment_needed;
4913       cgraph_node::rtl_info (current_function_decl)
4914 	->preferred_incoming_stack_boundary = pref;
4915     }
4916 
4917   /* Make sure volatile mem refs aren't considered valid operands for
4918      arithmetic insns.  We must call this here if this is a nested inline
4919      function, since the above code leaves us in the init_recog state,
4920      and the function context push/pop code does not save/restore volatile_ok.
4921 
4922      ??? Maybe it isn't necessary for expand_start_function to call this
4923      anymore if we do it here?  */
4924 
4925   init_recog_no_volatile ();
4926 
4927   /* We're done with this function.  Free up memory if we can.  */
4928   free_after_parsing (cfun);
4929   free_after_compilation (cfun);
4930   return 0;
4931 }
4932 
4933 namespace {
4934 
4935 const pass_data pass_data_clean_state =
4936 {
4937   RTL_PASS, /* type */
4938   "*clean_state", /* name */
4939   OPTGROUP_NONE, /* optinfo_flags */
4940   TV_FINAL, /* tv_id */
4941   0, /* properties_required */
4942   0, /* properties_provided */
4943   PROP_rtl, /* properties_destroyed */
4944   0, /* todo_flags_start */
4945   0, /* todo_flags_finish */
4946 };
4947 
4948 class pass_clean_state : public rtl_opt_pass
4949 {
4950 public:
4951   pass_clean_state (gcc::context *ctxt)
4952     : rtl_opt_pass (pass_data_clean_state, ctxt)
4953   {}
4954 
4955   /* opt_pass methods: */
4956   virtual unsigned int execute (function *)
4957     {
4958       return rest_of_clean_state ();
4959     }
4960 
4961 }; // class pass_clean_state
4962 
4963 } // anon namespace
4964 
4965 rtl_opt_pass *
4966 make_pass_clean_state (gcc::context *ctxt)
4967 {
4968   return new pass_clean_state (ctxt);
4969 }
4970 
4971 /* Return true if INSN is a call to the current function.  */
4972 
4973 static bool
4974 self_recursive_call_p (rtx_insn *insn)
4975 {
4976   tree fndecl = get_call_fndecl (insn);
4977   return (fndecl == current_function_decl
4978 	  && decl_binds_to_current_def_p (fndecl));
4979 }
4980 
4981 /* Collect hard register usage for the current function.  */
4982 
4983 static void
4984 collect_fn_hard_reg_usage (void)
4985 {
4986   rtx_insn *insn;
4987 #ifdef STACK_REGS
4988   int i;
4989 #endif
4990   struct cgraph_rtl_info *node;
4991   HARD_REG_SET function_used_regs;
4992 
4993   /* ??? To be removed when all the ports have been fixed.  */
4994   if (!targetm.call_fusage_contains_non_callee_clobbers)
4995     return;
4996 
4997   CLEAR_HARD_REG_SET (function_used_regs);
4998 
4999   for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
5000     {
5001       HARD_REG_SET insn_used_regs;
5002 
5003       if (!NONDEBUG_INSN_P (insn))
5004 	continue;
5005 
5006       if (CALL_P (insn)
5007 	  && !self_recursive_call_p (insn))
5008 	{
5009 	  if (!get_call_reg_set_usage (insn, &insn_used_regs,
5010 				       call_used_reg_set))
5011 	    return;
5012 
5013 	  IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
5014 	}
5015 
5016       find_all_hard_reg_sets (insn, &insn_used_regs, false);
5017       IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
5018     }
5019 
5020   /* Be conservative - mark fixed and global registers as used.  */
5021   IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
5022 
5023 #ifdef STACK_REGS
5024   /* Handle STACK_REGS conservatively, since the df-framework does not
5025      provide accurate information for them.  */
5026 
5027   for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
5028     SET_HARD_REG_BIT (function_used_regs, i);
5029 #endif
5030 
5031   /* The information we have gathered is only interesting if it exposes a
5032      register from the call_used_regs that is not used in this function.  */
5033   if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
5034     return;
5035 
5036   node = cgraph_node::rtl_info (current_function_decl);
5037   gcc_assert (node != NULL);
5038 
5039   COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
5040   node->function_used_regs_valid = 1;
5041 }
5042 
5043 /* Get the declaration of the function called by INSN.  */
5044 
5045 static tree
5046 get_call_fndecl (rtx_insn *insn)
5047 {
5048   rtx note, datum;
5049 
5050   note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
5051   if (note == NULL_RTX)
5052     return NULL_TREE;
5053 
5054   datum = XEXP (note, 0);
5055   if (datum != NULL_RTX)
5056     return SYMBOL_REF_DECL (datum);
5057 
5058   return NULL_TREE;
5059 }
5060 
5061 /* Return the cgraph_rtl_info of the function called by INSN.  Returns NULL for
5062    call targets that can be overwritten.  */
5063 
5064 static struct cgraph_rtl_info *
5065 get_call_cgraph_rtl_info (rtx_insn *insn)
5066 {
5067   tree fndecl;
5068 
5069   if (insn == NULL_RTX)
5070     return NULL;
5071 
5072   fndecl = get_call_fndecl (insn);
5073   if (fndecl == NULL_TREE
5074       || !decl_binds_to_current_def_p (fndecl))
5075     return NULL;
5076 
5077   return cgraph_node::rtl_info (fndecl);
5078 }
5079 
5080 /* Find hard registers used by function call instruction INSN, and return them
5081    in REG_SET.  Return DEFAULT_SET in REG_SET if not found.  */
5082 
5083 bool
5084 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
5085 			HARD_REG_SET default_set)
5086 {
5087   if (flag_ipa_ra)
5088     {
5089       struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
5090       if (node != NULL
5091 	  && node->function_used_regs_valid)
5092 	{
5093 	  COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
5094 	  AND_HARD_REG_SET (*reg_set, default_set);
5095 	  return true;
5096 	}
5097     }
5098   COPY_HARD_REG_SET (*reg_set, default_set);
5099   targetm.remove_extra_call_preserved_regs (insn, reg_set);
5100   return false;
5101 }
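
/* A caller-side sketch (illustrative, using a made-up helper name): query the
   registers a particular call insn may clobber, falling back to
   regs_invalidated_by_call when no IPA-RA information has been recorded for
   the callee.  */
#if 0
static bool
example_call_may_clobber_reg_p (rtx_insn *call_insn, unsigned int regno)
{
  HARD_REG_SET clobbered;
  get_call_reg_set_usage (call_insn, &clobbered, regs_invalidated_by_call);
  return TEST_HARD_REG_BIT (clobbered, regno);
}
#endif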
5102