1 /* Convert RTL to assembler code and output it, for GNU compiler.
2    Copyright (C) 1987-2014 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This is the final pass of the compiler.
21    It looks at the rtl code for a function and outputs assembler code.
22 
23    Call `final_start_function' to output the assembler code for function entry,
24    `final' to output assembler code for some RTL code,
25    `final_end_function' to output assembler code for function exit.
26    If a function is compiled in several pieces, each piece is
27    output separately with `final'.
28 
29    Some optimizations are also done at this level.
30    Move instructions that were made unnecessary by good register allocation
31    are detected and omitted from the output.  (Though most of these
32    are removed by the last jump pass.)
33 
34    Instructions to set the condition codes are omitted when it can be
35    seen that the condition codes already had the desired values.
36 
37    In some cases it is sufficient if the inherited condition codes
38    have related values, but this may require the following insn
39    (the one that tests the condition codes) to be modified.
40 
41    The code for the function prologue and epilogue is generated
42    directly in assembler by the target functions function_prologue and
43    function_epilogue.  Those instructions never exist as rtl.  */
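
/* For illustration only, a caller typically drives this pass roughly as

     final_start_function (first, file, optimize_p);
     final (first, file, optimize_p);
     final_end_function ();

   This is a simplified sketch; the real driver in the RTL pass pipeline also
   runs shorten_branches beforehand and interacts with the debug hooks.  */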
44 
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "tm.h"
49 
50 #include "tree.h"
51 #include "varasm.h"
52 #include "rtl.h"
53 #include "tm_p.h"
54 #include "regs.h"
55 #include "insn-config.h"
56 #include "insn-attr.h"
57 #include "recog.h"
58 #include "conditions.h"
59 #include "flags.h"
60 #include "hard-reg-set.h"
61 #include "output.h"
62 #include "except.h"
63 #include "function.h"
64 #include "rtl-error.h"
65 #include "toplev.h" /* exact_log2, floor_log2 */
66 #include "reload.h"
67 #include "intl.h"
68 #include "basic-block.h"
69 #include "target.h"
70 #include "targhooks.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "tree-pass.h"
74 #include "cgraph.h"
75 #include "tree-ssa.h"
76 #include "coverage.h"
77 #include "df.h"
78 #include "ggc.h"
79 #include "cfgloop.h"
80 #include "params.h"
81 #include "tree-pretty-print.h" /* for dump_function_header */
82 #include "asan.h"
83 
84 #ifdef XCOFF_DEBUGGING_INFO
85 #include "xcoffout.h"		/* Needed for external data
86 				   declarations for e.g. AIX 4.x.  */
87 #endif
88 
89 #include "dwarf2out.h"
90 
91 #ifdef DBX_DEBUGGING_INFO
92 #include "dbxout.h"
93 #endif
94 
95 #ifdef SDB_DEBUGGING_INFO
96 #include "sdbout.h"
97 #endif
98 
99 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
100    So define a null default for it to save conditionalization later.  */
101 #ifndef CC_STATUS_INIT
102 #define CC_STATUS_INIT
103 #endif
104 
105 /* Is the given character a logical line separator for the assembler?  */
106 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
107 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
108 #endif
109 
110 #ifndef JUMP_TABLES_IN_TEXT_SECTION
111 #define JUMP_TABLES_IN_TEXT_SECTION 0
112 #endif
113 
114 /* Bitflags used by final_scan_insn.  */
115 #define SEEN_NOTE	1
116 #define SEEN_EMITTED	2
117 
118 /* Last insn processed by final_scan_insn.  */
119 static rtx debug_insn;
120 rtx current_output_insn;
121 
122 /* Line number of last NOTE.  */
123 static int last_linenum;
124 
125 /* Last discriminator written to assembly.  */
126 static int last_discriminator;
127 
128 /* Discriminator of current block.  */
129 static int discriminator;
130 
131 /* Highest line number in current block.  */
132 static int high_block_linenum;
133 
134 /* Likewise for function.  */
135 static int high_function_linenum;
136 
137 /* Filename of last NOTE.  */
138 static const char *last_filename;
139 
140 /* Override filename and line number.  */
141 static const char *override_filename;
142 static int override_linenum;
143 
144 /* Whether to force emission of a line note before the next insn.  */
145 static bool force_source_line = false;
146 
147 extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */
148 
149 /* Nonzero while outputting an `asm' with operands.
150    This means that inconsistencies are the user's fault, so don't die.
151    The precise value is the insn being output, to pass to error_for_asm.  */
152 rtx this_is_asm_operands;
153 
154 /* Number of operands of this insn, for an `asm' with operands.  */
155 static unsigned int insn_noperands;
156 
157 /* Compare optimization flag.  */
158 
159 static rtx last_ignored_compare = 0;
160 
161 /* Assign a unique number to each insn that is output.
162    This can be used to generate unique local labels.  */
163 
164 static int insn_counter = 0;
165 
166 #ifdef HAVE_cc0
167 /* This variable contains machine-dependent flags (defined in tm.h)
168    set and examined by output routines
169    that describe how to interpret the condition codes properly.  */
170 
171 CC_STATUS cc_status;
172 
173 /* During output of an insn, this contains a copy of cc_status
174    from before the insn.  */
175 
176 CC_STATUS cc_prev_status;
177 #endif
178 
179 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */
180 
181 static int block_depth;
182 
183 /* Nonzero if we have enabled APP processing of our assembler output.  */
184 
185 static int app_on;
186 
187 /* If we are outputting an insn sequence, this contains the sequence rtx.
188    Zero otherwise.  */
189 
190 rtx final_sequence;
191 
192 #ifdef ASSEMBLER_DIALECT
193 
194 /* Number of the assembler dialect to use, starting at 0.  */
195 static int dialect_number;
196 #endif
197 
198 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
199 rtx current_insn_predicate;
200 
201 /* True if printing into -fdump-final-insns= dump.  */
202 bool final_insns_dump_p;
203 
204 /* True if profile_function should be called, but hasn't been called yet.  */
205 static bool need_profile_function;
206 
207 static int asm_insn_count (rtx);
208 static void profile_function (FILE *);
209 static void profile_after_prologue (FILE *);
210 static bool notice_source_line (rtx, bool *);
211 static rtx walk_alter_subreg (rtx *, bool *);
212 static void output_asm_name (void);
213 static void output_alternate_entry_point (FILE *, rtx);
214 static tree get_mem_expr_from_op (rtx, int *);
215 static void output_asm_operand_names (rtx *, int *, int);
216 #ifdef LEAF_REGISTERS
217 static void leaf_renumber_regs (rtx);
218 #endif
219 #ifdef HAVE_cc0
220 static int alter_cond (rtx);
221 #endif
222 #ifndef ADDR_VEC_ALIGN
223 static int final_addr_vec_align (rtx);
224 #endif
225 static int align_fuzz (rtx, rtx, int, unsigned);
226 
227 /* Initialize data in final at the beginning of a compilation.  */
228 
229 void
230 init_final (const char *filename ATTRIBUTE_UNUSED)
231 {
232   app_on = 0;
233   final_sequence = 0;
234 
235 #ifdef ASSEMBLER_DIALECT
236   dialect_number = ASSEMBLER_DIALECT;
237 #endif
238 }
239 
240 /* Default target function prologue and epilogue assembler output.
241 
242    If not overridden for epilogue code, then the function body itself
243    contains return instructions wherever needed.  */
244 void
245 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
246 			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
247 {
248 }
249 
250 void
251 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
252 					 tree decl ATTRIBUTE_UNUSED,
253 					 bool new_is_cold ATTRIBUTE_UNUSED)
254 {
255 }
256 
257 /* Default target hook that outputs nothing to a stream.  */
258 void
259 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
260 {
261 }
262 
263 /* Enable APP processing of subsequent output.
264    Used before the output from an `asm' statement.  */
265 
266 void
267 app_enable (void)
268 {
269   if (! app_on)
270     {
271       fputs (ASM_APP_ON, asm_out_file);
272       app_on = 1;
273     }
274 }
275 
276 /* Disable APP processing of subsequent output.
277    Called from varasm.c before most kinds of output.  */
278 
279 void
280 app_disable (void)
281 {
282   if (app_on)
283     {
284       fputs (ASM_APP_OFF, asm_out_file);
285       app_on = 0;
286     }
287 }
288 
289 /* Return the number of slots filled in the current
290    delayed branch sequence (we don't count the insn needing the
291    delay slot).   Zero if not in a delayed branch sequence.  */
292 
293 #ifdef DELAY_SLOTS
294 int
295 dbr_sequence_length (void)
296 {
297   if (final_sequence != 0)
298     return XVECLEN (final_sequence, 0) - 1;
299   else
300     return 0;
301 }
302 #endif
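
/* For example, while outputting a branch whose two delay slots were both
   filled, final_sequence is a SEQUENCE of three insns (the branch plus the
   two slot insns), so dbr_sequence_length returns 3 - 1 == 2.  */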
303 
304 /* The next two pages contain routines used to compute the length of an insn
305    and to shorten branches.  */
306 
307 /* Arrays for insn lengths, and addresses.  The latter is referenced by
308    `insn_current_length'.  */
309 
310 static int *insn_lengths;
311 
312 vec<int> insn_addresses_;
313 
314 /* Max uid for which the above arrays are valid.  */
315 static int insn_lengths_max_uid;
316 
317 /* Address of insn being processed.  Used by `insn_current_length'.  */
318 int insn_current_address;
319 
320 /* Address of insn being processed in previous iteration.  */
321 int insn_last_address;
322 
323 /* Known invariant alignment of the insn being processed.  */
324 int insn_current_align;
325 
326 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
327    gives the next following alignment insn that increases the known
328    alignment, or NULL_RTX if there is no such insn.
329    For any alignment insn obtained this way, we can again index uid_align
330    with its uid to obtain the next following alignment insn that in turn
331    increases the alignment, until we reach NULL_RTX; in the following
332    comments we'll call the sequence obtained this way for each insn the
333    alignment chain of that insn.  */
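
/* For example (an illustrative chain): if insn I is followed first by a
   label L1 with known alignment 2^3 and later by a label L2 with alignment
   2^4, then uid_align[INSN_UID (I)] == L1 and uid_align[INSN_UID (L1)] == L2,
   so the alignment chain of I is L1, L2, NULL_RTX.  */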
334 
335 struct label_alignment
336 {
337   short alignment;
338   short max_skip;
339 };
340 
341 static rtx *uid_align;
342 static int *uid_shuid;
343 static struct label_alignment *label_align;
344 
345 /* Indicate that branch shortening hasn't yet been done.  */
346 
347 void
348 init_insn_lengths (void)
349 {
350   if (uid_shuid)
351     {
352       free (uid_shuid);
353       uid_shuid = 0;
354     }
355   if (insn_lengths)
356     {
357       free (insn_lengths);
358       insn_lengths = 0;
359       insn_lengths_max_uid = 0;
360     }
361   if (HAVE_ATTR_length)
362     INSN_ADDRESSES_FREE ();
363   if (uid_align)
364     {
365       free (uid_align);
366       uid_align = 0;
367     }
368 }
369 
370 /* Obtain the current length of an insn.  If branch shortening has been done,
371    get its actual length.  Otherwise, use FALLBACK_FN to calculate the
372    length.  */
373 static inline int
374 get_attr_length_1 (rtx insn, int (*fallback_fn) (rtx))
375 {
376   rtx body;
377   int i;
378   int length = 0;
379 
380   if (!HAVE_ATTR_length)
381     return 0;
382 
383   if (insn_lengths_max_uid > INSN_UID (insn))
384     return insn_lengths[INSN_UID (insn)];
385   else
386     switch (GET_CODE (insn))
387       {
388       case NOTE:
389       case BARRIER:
390       case CODE_LABEL:
391       case DEBUG_INSN:
392 	return 0;
393 
394       case CALL_INSN:
395       case JUMP_INSN:
396 	length = fallback_fn (insn);
397 	break;
398 
399       case INSN:
400 	body = PATTERN (insn);
401 	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
402 	  return 0;
403 
404 	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
405 	  length = asm_insn_count (body) * fallback_fn (insn);
406 	else if (GET_CODE (body) == SEQUENCE)
407 	  for (i = 0; i < XVECLEN (body, 0); i++)
408 	    length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
409 	else
410 	  length = fallback_fn (insn);
411 	break;
412 
413       default:
414 	break;
415       }
416 
417 #ifdef ADJUST_INSN_LENGTH
418   ADJUST_INSN_LENGTH (insn, length);
419 #endif
420   return length;
421 }
422 
423 /* Obtain the current length of an insn.  If branch shortening has been done,
424    get its actual length.  Otherwise, get its maximum length.  */
425 int
426 get_attr_length (rtx insn)
427 {
428   return get_attr_length_1 (insn, insn_default_length);
429 }
430 
431 /* Obtain the current length of an insn.  If branch shortening has been done,
432    get its actual length.  Otherwise, get its minimum length.  */
433 int
434 get_attr_min_length (rtx insn)
435 {
436   return get_attr_length_1 (insn, insn_min_length);
437 }
438 
439 /* Code to handle alignment inside shorten_branches.  */
440 
441 /* Here is an explanation of how the algorithm in align_fuzz can give
442    proper results:
443 
444    Call a sequence of instructions beginning with alignment point X
445    and continuing until the next alignment point `block X'.  When `X'
446    is used in an expression, it means the alignment value of the
447    alignment point.
448 
449    Call the distance between the start of the first insn of block X, and
450    the end of the last insn of block X `IX', for the `inner size of X'.
451    This is clearly the sum of the instruction lengths.
452 
453    Likewise with the next alignment-delimited block following X, which we
454    shall call block Y.
455 
456    Call the distance between the start of the first insn of block X, and
457    the start of the first insn of block Y `OX', for the `outer size of X'.
458 
459    The estimated padding is then OX - IX.
460 
461    OX can be safely estimated as
462 
463            if (X >= Y)
464                    OX = round_up(IX, Y)
465            else
466                    OX = round_up(IX, X) + Y - X
467 
468    Clearly est(IX) >= real(IX), because that only depends on the
469    instruction lengths, and those being overestimated is a given.
470 
471    Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
472    we needn't worry about that when thinking about OX.
473 
474    When X >= Y, the alignment provided by Y adds no uncertainty factor
475    for branch ranges starting before X, so we can just round what we have.
476    But when X < Y, we don't know anything about the, so to speak,
477    `middle bits', so we have to assume the worst when aligning up from an
478    address mod X to one mod Y, which is Y - X.  */
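
/* A worked example with made-up numbers: suppose block X is known to start
   4-byte aligned (X = 4), its insns add up to IX = 10 bytes, and the next
   alignment point requests Y = 8.  Since X < Y, we estimate
   OX = round_up (10, 4) + 8 - 4 = 16, i.e. up to 6 bytes of padding; had Y
   been 4 (so X >= Y), the estimate would be OX = round_up (10, 4) = 12.  */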
479 
480 #ifndef LABEL_ALIGN
481 #define LABEL_ALIGN(LABEL) align_labels_log
482 #endif
483 
484 #ifndef LOOP_ALIGN
485 #define LOOP_ALIGN(LABEL) align_loops_log
486 #endif
487 
488 #ifndef LABEL_ALIGN_AFTER_BARRIER
489 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
490 #endif
491 
492 #ifndef JUMP_ALIGN
493 #define JUMP_ALIGN(LABEL) align_jumps_log
494 #endif
495 
496 int
497 default_label_align_after_barrier_max_skip (rtx insn ATTRIBUTE_UNUSED)
498 {
499   return 0;
500 }
501 
502 int
503 default_loop_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
504 {
505   return align_loops_max_skip;
506 }
507 
508 int
509 default_label_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
510 {
511   return align_labels_max_skip;
512 }
513 
514 int
515 default_jump_align_max_skip (rtx insn ATTRIBUTE_UNUSED)
516 {
517   return align_jumps_max_skip;
518 }
519 
520 #ifndef ADDR_VEC_ALIGN
521 static int
522 final_addr_vec_align (rtx addr_vec)
523 {
524   int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
525 
526   if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
527     align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
528   return exact_log2 (align);
529 
530 }
531 
532 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
533 #endif
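
/* For example, an ADDR_VEC in SImode has GET_MODE_SIZE == 4, so (unless
   capped by BIGGEST_ALIGNMENT) the jump table is aligned to 4 bytes,
   i.e. final_addr_vec_align returns exact_log2 (4) == 2.  */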
534 
535 #ifndef INSN_LENGTH_ALIGNMENT
536 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
537 #endif
538 
539 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
540 
541 static int min_labelno, max_labelno;
542 
543 #define LABEL_TO_ALIGNMENT(LABEL) \
544   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
545 
546 #define LABEL_TO_MAX_SKIP(LABEL) \
547   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
548 
549 /* For the benefit of port-specific code, provide this also as a function.  */
550 
551 int
552 label_to_alignment (rtx label)
553 {
554   if (CODE_LABEL_NUMBER (label) <= max_labelno)
555     return LABEL_TO_ALIGNMENT (label);
556   return 0;
557 }
558 
559 int
560 label_to_max_skip (rtx label)
561 {
562   if (CODE_LABEL_NUMBER (label) <= max_labelno)
563     return LABEL_TO_MAX_SKIP (label);
564   return 0;
565 }
566 
567 /* The differences in addresses
568    between a branch and its target might grow or shrink depending on
569    the alignment the start insn of the range (the branch for a forward
570    branch or the label for a backward branch) starts out on; if these
571    differences are used naively, they can even oscillate infinitely.
572    We therefore want to compute a 'worst case' address difference that
573    is independent of the alignment the start insn of the range ends
574    up on, and that is at least as large as the actual difference.
575    The function align_fuzz calculates the amount we have to add to the
576    naively computed difference, by traversing the part of the alignment
577    chain of the start insn of the range that is in front of the end insn
578    of the range, and considering for each alignment the maximum amount
579    that it might contribute to a size increase.
580 
581    For casesi tables, we also want to know worst case minimum amounts of
582    address difference, in case a machine description wants to introduce
583    some common offset that is added to all offsets in a table.
584    For this purpose, align_fuzz with a growth argument of 0 computes the
585    appropriate adjustment.  */
586 
587 /* Compute the maximum delta by which the difference of the addresses of
588    START and END might grow / shrink due to a different address for start
589    which changes the size of alignment insns between START and END.
590    KNOWN_ALIGN_LOG is the alignment known for START.
591    GROWTH should be ~0 if the objective is to compute potential code size
592    increase, and 0 if the objective is to compute potential shrink.
593    The return value is undefined for any other value of GROWTH.  */
594 
595 static int
596 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
597 {
598   int uid = INSN_UID (start);
599   rtx align_label;
600   int known_align = 1 << known_align_log;
601   int end_shuid = INSN_SHUID (end);
602   int fuzz = 0;
603 
604   for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
605     {
606       int align_addr, new_align;
607 
608       uid = INSN_UID (align_label);
609       align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
610       if (uid_shuid[uid] > end_shuid)
611 	break;
612       known_align_log = LABEL_TO_ALIGNMENT (align_label);
613       new_align = 1 << known_align_log;
614       if (new_align < known_align)
615 	continue;
616       fuzz += (-align_addr ^ growth) & (new_align - known_align);
617       known_align = new_align;
618     }
619   return fuzz;
620 }
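
/* A numeric illustration with assumed values: let KNOWN_ALIGN_LOG be 2
   (4-byte alignment known) and let the first label on the chain request
   16-byte alignment at an estimated pre-padding address of 16.  The label
   currently needs no padding, so with GROWTH == ~0 the term added to FUZZ is
   (16 - 1) & (16 - 4) == 12 (up to 12 bytes of padding might still appear),
   while with GROWTH == 0 it is (-16) & (16 - 4) == 0 (none can disappear).  */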
621 
622 /* Compute a worst-case reference address of a branch so that it
623    can be safely used in the presence of aligned labels.  Since the
624    size of the branch itself is unknown, the size of the branch is
625    not included in the range.  I.e. for a forward branch, the reference
626    address is the end address of the branch as known from the previous
627    branch shortening pass, minus a value to account for possible size
628    increase due to alignment.  For a backward branch, it is the start
629    address of the branch as known from the current pass, plus a value
630    to account for possible size increase due to alignment.
631    NB.: Therefore, the maximum offset allowed for backward branches needs
632    to exclude the branch size.  */
633 
634 int
635 insn_current_reference_address (rtx branch)
636 {
637   rtx dest, seq;
638   int seq_uid;
639 
640   if (! INSN_ADDRESSES_SET_P ())
641     return 0;
642 
643   seq = NEXT_INSN (PREV_INSN (branch));
644   seq_uid = INSN_UID (seq);
645   if (!JUMP_P (branch))
646     /* This can happen for example on the PA; the objective is to know the
647        offset to address something in front of the start of the function.
648        Thus, we can treat it like a backward branch.
649        We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
650        any alignment we'd encounter, so we skip the call to align_fuzz.  */
651     return insn_current_address;
652   dest = JUMP_LABEL (branch);
653 
654   /* BRANCH has no proper alignment chain set, so use SEQ.
655      BRANCH also has no INSN_SHUID.  */
656   if (INSN_SHUID (seq) < INSN_SHUID (dest))
657     {
658       /* Forward branch.  */
659       return (insn_last_address + insn_lengths[seq_uid]
660 	      - align_fuzz (seq, dest, length_unit_log, ~0));
661     }
662   else
663     {
664       /* Backward branch.  */
665       return (insn_current_address
666 	      + align_fuzz (dest, seq, length_unit_log, ~0));
667     }
668 }
669 
670 /* Compute branch alignments based on frequency information in the
671    CFG.  */
672 
673 unsigned int
674 compute_alignments (void)
675 {
676   int log, max_skip, max_log;
677   basic_block bb;
678   int freq_max = 0;
679   int freq_threshold = 0;
680 
681   if (label_align)
682     {
683       free (label_align);
684       label_align = 0;
685     }
686 
687   max_labelno = max_label_num ();
688   min_labelno = get_first_label_num ();
689   label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
690 
691   /* If not optimizing or optimizing for size, don't assign any alignments.  */
692   if (! optimize || optimize_function_for_size_p (cfun))
693     return 0;
694 
695   if (dump_file)
696     {
697       dump_reg_info (dump_file);
698       dump_flow_info (dump_file, TDF_DETAILS);
699       flow_loops_dump (dump_file, NULL, 1);
700     }
701   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
702   FOR_EACH_BB_FN (bb, cfun)
703     if (bb->frequency > freq_max)
704       freq_max = bb->frequency;
705   freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
706 
707   if (dump_file)
708     fprintf (dump_file, "freq_max: %i\n",freq_max);
709   FOR_EACH_BB_FN (bb, cfun)
710     {
711       rtx label = BB_HEAD (bb);
712       int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
713       edge e;
714       edge_iterator ei;
715 
716       if (!LABEL_P (label)
717 	  || optimize_bb_for_size_p (bb))
718 	{
719 	  if (dump_file)
720 	    fprintf (dump_file,
721 		     "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
722 		     bb->index, bb->frequency, bb->loop_father->num,
723 		     bb_loop_depth (bb));
724 	  continue;
725 	}
726       max_log = LABEL_ALIGN (label);
727       max_skip = targetm.asm_out.label_align_max_skip (label);
728 
729       FOR_EACH_EDGE (e, ei, bb->preds)
730 	{
731 	  if (e->flags & EDGE_FALLTHRU)
732 	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
733 	  else
734 	    branch_frequency += EDGE_FREQUENCY (e);
735 	}
736       if (dump_file)
737 	{
738 	  fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
739 		   " %2i fall %4i branch %4i",
740 		   bb->index, bb->frequency, bb->loop_father->num,
741 		   bb_loop_depth (bb),
742 		   fallthru_frequency, branch_frequency);
743 	  if (!bb->loop_father->inner && bb->loop_father->num)
744 	    fprintf (dump_file, " inner_loop");
745 	  if (bb->loop_father->header == bb)
746 	    fprintf (dump_file, " loop_header");
747 	  fprintf (dump_file, "\n");
748 	}
749 
750       /* There are two purposes for aligning a block with no fallthru incoming
751 	 edge:
752 	 1) to avoid fetch stalls when the branch destination is near a cache boundary;
753 	 2) to improve cache efficiency in case the previous block is not executed
754 	    (so it does not need to be in the cache).
755 	 To catch the first case, we align frequently executed blocks.
756 	 To catch the second, we align blocks that are executed more frequently
757 	 than the predecessor and whose predecessor is likely not to be
758 	 executed when the function is called.  */
759 
760       if (!has_fallthru
761 	  && (branch_frequency > freq_threshold
762 	      || (bb->frequency > bb->prev_bb->frequency * 10
763 		  && (bb->prev_bb->frequency
764 		      <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
765 	{
766 	  log = JUMP_ALIGN (label);
767 	  if (dump_file)
768 	    fprintf (dump_file, "  jump alignment added.\n");
769 	  if (max_log < log)
770 	    {
771 	      max_log = log;
772 	      max_skip = targetm.asm_out.jump_align_max_skip (label);
773 	    }
774 	}
775       /* In case the block is frequent and reached mostly by non-fallthru edges,
776 	 align it.  It is most likely the first block of a loop.  */
777       if (has_fallthru
778 	  && optimize_bb_for_speed_p (bb)
779 	  && branch_frequency + fallthru_frequency > freq_threshold
780 	  && (branch_frequency
781 	      > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
782 	{
783 	  log = LOOP_ALIGN (label);
784 	  if (dump_file)
785 	    fprintf (dump_file, "  internal loop alignment added.\n");
786 	  if (max_log < log)
787 	    {
788 	      max_log = log;
789 	      max_skip = targetm.asm_out.loop_align_max_skip (label);
790 	    }
791 	}
792       LABEL_TO_ALIGNMENT (label) = max_log;
793       LABEL_TO_MAX_SKIP (label) = max_skip;
794     }
795 
796   loop_optimizer_finalize ();
797   free_dominance_info (CDI_DOMINATORS);
798   return 0;
799 }
800 
801 /* Grow the LABEL_ALIGN array after new labels are created.  */
802 
803 static void
804 grow_label_align (void)
805 {
806   int old = max_labelno;
807   int n_labels;
808   int n_old_labels;
809 
810   max_labelno = max_label_num ();
811 
812   n_labels = max_labelno - min_labelno + 1;
813   n_old_labels = old - min_labelno + 1;
814 
815   label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
816 
817   /* The range of labels grows monotonically in the function.  Failing here
818      means that the initialization of the array got lost.  */
819   gcc_assert (n_old_labels <= n_labels);
820 
821   memset (label_align + n_old_labels, 0,
822           (n_labels - n_old_labels) * sizeof (struct label_alignment));
823 }
824 
825 /* Update the already computed alignment information.  LABEL_PAIRS is a vector
826    made up of pairs of labels for which the alignment information of the first
827    element will be copied from that of the second element.  */
828 
829 void
830 update_alignments (vec<rtx> &label_pairs)
831 {
832   unsigned int i = 0;
833   rtx iter, label = NULL_RTX;
834 
835   if (max_labelno != max_label_num ())
836     grow_label_align ();
837 
838   FOR_EACH_VEC_ELT (label_pairs, i, iter)
839     if (i & 1)
840       {
841 	LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
842 	LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
843       }
844     else
845       label = iter;
846 }
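
/* For example, if LABEL_PAIRS contains {L1, L2, L3, L4}, then L1 receives the
   alignment and max_skip recorded for L2, and L3 receives those of L4.  */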
847 
848 namespace {
849 
850 const pass_data pass_data_compute_alignments =
851 {
852   RTL_PASS, /* type */
853   "alignments", /* name */
854   OPTGROUP_NONE, /* optinfo_flags */
855   false, /* has_gate */
856   true, /* has_execute */
857   TV_NONE, /* tv_id */
858   0, /* properties_required */
859   0, /* properties_provided */
860   0, /* properties_destroyed */
861   0, /* todo_flags_start */
862   TODO_verify_rtl_sharing, /* todo_flags_finish */
863 };
864 
865 class pass_compute_alignments : public rtl_opt_pass
866 {
867 public:
868   pass_compute_alignments (gcc::context *ctxt)
869     : rtl_opt_pass (pass_data_compute_alignments, ctxt)
870   {}
871 
872   /* opt_pass methods: */
873   unsigned int execute () { return compute_alignments (); }
874 
875 }; // class pass_compute_alignments
876 
877 } // anon namespace
878 
879 rtl_opt_pass *
880 make_pass_compute_alignments (gcc::context *ctxt)
881 {
882   return new pass_compute_alignments (ctxt);
883 }
884 
885 
886 /* Make a pass over all insns and compute their actual lengths by shortening
887    any branches of variable length if possible.  */
888 
889 /* shorten_branches might be called multiple times:  for example, the SH
890    port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
891    In order to do this, it needs proper length information, which it obtains
892    by calling shorten_branches.  This cannot be collapsed with
893    shorten_branches itself into a single pass unless we also want to integrate
894    reorg.c, since the branch splitting exposes new instructions with delay
895    slots.  */
896 
897 void
898 shorten_branches (rtx first)
899 {
900   rtx insn;
901   int max_uid;
902   int i;
903   int max_log;
904   int max_skip;
905 #define MAX_CODE_ALIGN 16
906   rtx seq;
907   int something_changed = 1;
908   char *varying_length;
909   rtx body;
910   int uid;
911   rtx align_tab[MAX_CODE_ALIGN];
912 
913   /* Compute maximum UID and allocate label_align / uid_shuid.  */
914   max_uid = get_max_uid ();
915 
916   /* Free uid_shuid before reallocating it.  */
917   free (uid_shuid);
918 
919   uid_shuid = XNEWVEC (int, max_uid);
920 
921   if (max_labelno != max_label_num ())
922     grow_label_align ();
923 
924   /* Initialize label_align and set up uid_shuid to be strictly
925      monotonically rising with insn order.  */
926   /* We use max_log here to keep track of the maximum alignment we want to
927      impose on the next CODE_LABEL (or the current one if we are processing
928      the CODE_LABEL itself).  */
929 
930   max_log = 0;
931   max_skip = 0;
932 
933   for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
934     {
935       int log;
936 
937       INSN_SHUID (insn) = i++;
938       if (INSN_P (insn))
939 	continue;
940 
941       if (LABEL_P (insn))
942 	{
943 	  rtx next;
944 	  bool next_is_jumptable;
945 
946 	  /* Merge in alignments computed by compute_alignments.  */
947 	  log = LABEL_TO_ALIGNMENT (insn);
948 	  if (max_log < log)
949 	    {
950 	      max_log = log;
951 	      max_skip = LABEL_TO_MAX_SKIP (insn);
952 	    }
953 
954 	  next = next_nonnote_insn (insn);
955 	  next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
956 	  if (!next_is_jumptable)
957 	    {
958 	      log = LABEL_ALIGN (insn);
959 	      if (max_log < log)
960 		{
961 		  max_log = log;
962 		  max_skip = targetm.asm_out.label_align_max_skip (insn);
963 		}
964 	    }
965 	  /* ADDR_VECs only take room if read-only data goes into the text
966 	     section.  */
967 	  if ((JUMP_TABLES_IN_TEXT_SECTION
968 	       || readonly_data_section == text_section)
969 	      && next_is_jumptable)
970 	    {
971 	      log = ADDR_VEC_ALIGN (next);
972 	      if (max_log < log)
973 		{
974 		  max_log = log;
975 		  max_skip = targetm.asm_out.label_align_max_skip (insn);
976 		}
977 	    }
978 	  LABEL_TO_ALIGNMENT (insn) = max_log;
979 	  LABEL_TO_MAX_SKIP (insn) = max_skip;
980 	  max_log = 0;
981 	  max_skip = 0;
982 	}
983       else if (BARRIER_P (insn))
984 	{
985 	  rtx label;
986 
987 	  for (label = insn; label && ! INSN_P (label);
988 	       label = NEXT_INSN (label))
989 	    if (LABEL_P (label))
990 	      {
991 		log = LABEL_ALIGN_AFTER_BARRIER (insn);
992 		if (max_log < log)
993 		  {
994 		    max_log = log;
995 		    max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
996 		  }
997 		break;
998 	      }
999 	}
1000     }
1001   if (!HAVE_ATTR_length)
1002     return;
1003 
1004   /* Allocate the rest of the arrays.  */
1005   insn_lengths = XNEWVEC (int, max_uid);
1006   insn_lengths_max_uid = max_uid;
1007   /* Syntax errors can lead to labels being outside of the main insn stream.
1008      Initialize insn_addresses, so that we get reproducible results.  */
1009   INSN_ADDRESSES_ALLOC (max_uid);
1010 
1011   varying_length = XCNEWVEC (char, max_uid);
1012 
1013   /* Initialize uid_align.  We scan instructions
1014      from end to start, and keep in align_tab[n] the last seen insn
1015      that does an alignment of at least n+1, i.e. the successor
1016      in the alignment chain for an insn that does / has a known
1017      alignment of n.  */
1018   uid_align = XCNEWVEC (rtx, max_uid);
1019 
1020   for (i = MAX_CODE_ALIGN; --i >= 0;)
1021     align_tab[i] = NULL_RTX;
1022   seq = get_last_insn ();
1023   for (; seq; seq = PREV_INSN (seq))
1024     {
1025       int uid = INSN_UID (seq);
1026       int log;
1027       log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1028       uid_align[uid] = align_tab[0];
1029       if (log)
1030 	{
1031 	  /* Found an alignment label.  */
1032 	  uid_align[uid] = align_tab[log];
1033 	  for (i = log - 1; i >= 0; i--)
1034 	    align_tab[i] = seq;
1035 	}
1036     }
1037 
1038   /* When optimizing, we start assuming minimum length, and keep increasing
1039      lengths as we find the need for this, till nothing changes.
1040      When not optimizing, we start assuming maximum lengths, and
1041      do a single pass to update the lengths.  */
1042   bool increasing = optimize != 0;
1043 
1044 #ifdef CASE_VECTOR_SHORTEN_MODE
1045   if (optimize)
1046     {
1047       /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1048          label fields.  */
1049 
1050       int min_shuid = INSN_SHUID (get_insns ()) - 1;
1051       int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1052       int rel;
1053 
1054       for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1055 	{
1056 	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1057 	  int len, i, min, max, insn_shuid;
1058 	  int min_align;
1059 	  addr_diff_vec_flags flags;
1060 
1061 	  if (! JUMP_TABLE_DATA_P (insn)
1062 	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1063 	    continue;
1064 	  pat = PATTERN (insn);
1065 	  len = XVECLEN (pat, 1);
1066 	  gcc_assert (len > 0);
1067 	  min_align = MAX_CODE_ALIGN;
1068 	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1069 	    {
1070 	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1071 	      int shuid = INSN_SHUID (lab);
1072 	      if (shuid < min)
1073 		{
1074 		  min = shuid;
1075 		  min_lab = lab;
1076 		}
1077 	      if (shuid > max)
1078 		{
1079 		  max = shuid;
1080 		  max_lab = lab;
1081 		}
1082 	      if (min_align > LABEL_TO_ALIGNMENT (lab))
1083 		min_align = LABEL_TO_ALIGNMENT (lab);
1084 	    }
1085 	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1086 	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1087 	  insn_shuid = INSN_SHUID (insn);
1088 	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1089 	  memset (&flags, 0, sizeof (flags));
1090 	  flags.min_align = min_align;
1091 	  flags.base_after_vec = rel > insn_shuid;
1092 	  flags.min_after_vec  = min > insn_shuid;
1093 	  flags.max_after_vec  = max > insn_shuid;
1094 	  flags.min_after_base = min > rel;
1095 	  flags.max_after_base = max > rel;
1096 	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
1097 
1098 	  if (increasing)
1099 	    PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1100 	}
1101     }
1102 #endif /* CASE_VECTOR_SHORTEN_MODE */
1103 
1104   /* Compute initial lengths, addresses, and varying flags for each insn.  */
1105   int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length;
1106 
1107   for (insn_current_address = 0, insn = first;
1108        insn != 0;
1109        insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1110     {
1111       uid = INSN_UID (insn);
1112 
1113       insn_lengths[uid] = 0;
1114 
1115       if (LABEL_P (insn))
1116 	{
1117 	  int log = LABEL_TO_ALIGNMENT (insn);
1118 	  if (log)
1119 	    {
1120 	      int align = 1 << log;
1121 	      int new_address = (insn_current_address + align - 1) & -align;
1122 	      insn_lengths[uid] = new_address - insn_current_address;
1123 	    }
1124 	}
1125 
1126       INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1127 
1128       if (NOTE_P (insn) || BARRIER_P (insn)
1129 	  || LABEL_P (insn) || DEBUG_INSN_P (insn))
1130 	continue;
1131       if (INSN_DELETED_P (insn))
1132 	continue;
1133 
1134       body = PATTERN (insn);
1135       if (JUMP_TABLE_DATA_P (insn))
1136 	{
1137 	  /* This only takes room if read-only data goes into the text
1138 	     section.  */
1139 	  if (JUMP_TABLES_IN_TEXT_SECTION
1140 	      || readonly_data_section == text_section)
1141 	    insn_lengths[uid] = (XVECLEN (body,
1142 					  GET_CODE (body) == ADDR_DIFF_VEC)
1143 				 * GET_MODE_SIZE (GET_MODE (body)));
1144 	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
1145 	}
1146       else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1147 	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1148       else if (GET_CODE (body) == SEQUENCE)
1149 	{
1150 	  int i;
1151 	  int const_delay_slots;
1152 #ifdef DELAY_SLOTS
1153 	  const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1154 #else
1155 	  const_delay_slots = 0;
1156 #endif
1157 	  int (*inner_length_fun) (rtx)
1158 	    = const_delay_slots ? length_fun : insn_default_length;
1159 	  /* Inside a delay slot sequence, we do not do any branch shortening
1160 	     if the shortening could change the number of delay slots
1161 	     of the branch.  */
1162 	  for (i = 0; i < XVECLEN (body, 0); i++)
1163 	    {
1164 	      rtx inner_insn = XVECEXP (body, 0, i);
1165 	      int inner_uid = INSN_UID (inner_insn);
1166 	      int inner_length;
1167 
1168 	      if (GET_CODE (body) == ASM_INPUT
1169 		  || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1170 		inner_length = (asm_insn_count (PATTERN (inner_insn))
1171 				* insn_default_length (inner_insn));
1172 	      else
1173 		inner_length = inner_length_fun (inner_insn);
1174 
1175 	      insn_lengths[inner_uid] = inner_length;
1176 	      if (const_delay_slots)
1177 		{
1178 		  if ((varying_length[inner_uid]
1179 		       = insn_variable_length_p (inner_insn)) != 0)
1180 		    varying_length[uid] = 1;
1181 		  INSN_ADDRESSES (inner_uid) = (insn_current_address
1182 						+ insn_lengths[uid]);
1183 		}
1184 	      else
1185 		varying_length[inner_uid] = 0;
1186 	      insn_lengths[uid] += inner_length;
1187 	    }
1188 	}
1189       else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1190 	{
1191 	  insn_lengths[uid] = length_fun (insn);
1192 	  varying_length[uid] = insn_variable_length_p (insn);
1193 	}
1194 
1195       /* If needed, do any adjustment.  */
1196 #ifdef ADJUST_INSN_LENGTH
1197       ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1198       if (insn_lengths[uid] < 0)
1199 	fatal_insn ("negative insn length", insn);
1200 #endif
1201     }
1202 
1203   /* Now loop over all the insns finding varying length insns.  For each,
1204      get the current insn length.  If it has changed, reflect the change.
1205      When nothing changes for a full pass, we are done.  */
1206 
1207   while (something_changed)
1208     {
1209       something_changed = 0;
1210       insn_current_align = MAX_CODE_ALIGN - 1;
1211       for (insn_current_address = 0, insn = first;
1212 	   insn != 0;
1213 	   insn = NEXT_INSN (insn))
1214 	{
1215 	  int new_length;
1216 #ifdef ADJUST_INSN_LENGTH
1217 	  int tmp_length;
1218 #endif
1219 	  int length_align;
1220 
1221 	  uid = INSN_UID (insn);
1222 
1223 	  if (LABEL_P (insn))
1224 	    {
1225 	      int log = LABEL_TO_ALIGNMENT (insn);
1226 
1227 #ifdef CASE_VECTOR_SHORTEN_MODE
1228 	      /* If the mode of a following jump table was changed, we
1229 		 may need to update the alignment of this label.  */
1230 	      rtx next;
1231 	      bool next_is_jumptable;
1232 
1233 	      next = next_nonnote_insn (insn);
1234 	      next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1235 	      if ((JUMP_TABLES_IN_TEXT_SECTION
1236 		   || readonly_data_section == text_section)
1237 		  && next_is_jumptable)
1238 		{
1239 		  int newlog = ADDR_VEC_ALIGN (next);
1240 		  if (newlog != log)
1241 		    {
1242 		      log = newlog;
1243 		      LABEL_TO_ALIGNMENT (insn) = log;
1244 		      something_changed = 1;
1245 		    }
1246 		}
1247 #endif
1248 
1249 	      if (log > insn_current_align)
1250 		{
1251 		  int align = 1 << log;
1252 		  int new_address= (insn_current_address + align - 1) & -align;
1253 		  insn_lengths[uid] = new_address - insn_current_address;
1254 		  insn_current_align = log;
1255 		  insn_current_address = new_address;
1256 		}
1257 	      else
1258 		insn_lengths[uid] = 0;
1259 	      INSN_ADDRESSES (uid) = insn_current_address;
1260 	      continue;
1261 	    }
1262 
1263 	  length_align = INSN_LENGTH_ALIGNMENT (insn);
1264 	  if (length_align < insn_current_align)
1265 	    insn_current_align = length_align;
1266 
1267 	  insn_last_address = INSN_ADDRESSES (uid);
1268 	  INSN_ADDRESSES (uid) = insn_current_address;
1269 
1270 #ifdef CASE_VECTOR_SHORTEN_MODE
1271 	  if (optimize
1272 	      && JUMP_TABLE_DATA_P (insn)
1273 	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1274 	    {
1275 	      rtx body = PATTERN (insn);
1276 	      int old_length = insn_lengths[uid];
1277 	      rtx rel_lab = XEXP (XEXP (body, 0), 0);
1278 	      rtx min_lab = XEXP (XEXP (body, 2), 0);
1279 	      rtx max_lab = XEXP (XEXP (body, 3), 0);
1280 	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1281 	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1282 	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1283 	      rtx prev;
1284 	      int rel_align = 0;
1285 	      addr_diff_vec_flags flags;
1286 	      enum machine_mode vec_mode;
1287 
1288 	      /* Avoid automatic aggregate initialization.  */
1289 	      flags = ADDR_DIFF_VEC_FLAGS (body);
1290 
1291 	      /* Try to find a known alignment for rel_lab.  */
1292 	      for (prev = rel_lab;
1293 		   prev
1294 		   && ! insn_lengths[INSN_UID (prev)]
1295 		   && ! (varying_length[INSN_UID (prev)] & 1);
1296 		   prev = PREV_INSN (prev))
1297 		if (varying_length[INSN_UID (prev)] & 2)
1298 		  {
1299 		    rel_align = LABEL_TO_ALIGNMENT (prev);
1300 		    break;
1301 		  }
1302 
1303 	      /* See the comment on addr_diff_vec_flags in rtl.h for the
1304 		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
1305 	      /* Anything after INSN still has addresses from the last
1306 		 pass; adjust these so that they reflect our current
1307 		 estimate for this pass.  */
1308 	      if (flags.base_after_vec)
1309 		rel_addr += insn_current_address - insn_last_address;
1310 	      if (flags.min_after_vec)
1311 		min_addr += insn_current_address - insn_last_address;
1312 	      if (flags.max_after_vec)
1313 		max_addr += insn_current_address - insn_last_address;
1314 	      /* We want to know the worst case, i.e. lowest possible value
1315 		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
1316 		 its offset is positive, and we have to be wary of code shrink;
1317 		 otherwise, it is negative, and we have to be wary of code
1318 		 size increase.  */
1319 	      if (flags.min_after_base)
1320 		{
1321 		  /* If INSN is between REL_LAB and MIN_LAB, the size
1322 		     changes we are about to make can change the alignment
1323 		     within the observed offset, therefore we have to break
1324 		     it up into two parts that are independent.  */
1325 		  if (! flags.base_after_vec && flags.min_after_vec)
1326 		    {
1327 		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1328 		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
1329 		    }
1330 		  else
1331 		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1332 		}
1333 	      else
1334 		{
1335 		  if (flags.base_after_vec && ! flags.min_after_vec)
1336 		    {
1337 		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1338 		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1339 		    }
1340 		  else
1341 		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1342 		}
1343 	      /* Likewise, determine the worst case, i.e. highest possible value
1344 		 for the offset of MAX_LAB.  */
1345 	      if (flags.max_after_base)
1346 		{
1347 		  if (! flags.base_after_vec && flags.max_after_vec)
1348 		    {
1349 		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1350 		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
1351 		    }
1352 		  else
1353 		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1354 		}
1355 	      else
1356 		{
1357 		  if (flags.base_after_vec && ! flags.max_after_vec)
1358 		    {
1359 		      max_addr += align_fuzz (max_lab, insn, 0, 0);
1360 		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
1361 		    }
1362 		  else
1363 		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1364 		}
1365 	      vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1366 						   max_addr - rel_addr, body);
1367 	      if (!increasing
1368 		  || (GET_MODE_SIZE (vec_mode)
1369 		      >= GET_MODE_SIZE (GET_MODE (body))))
1370 		PUT_MODE (body, vec_mode);
1371 	      if (JUMP_TABLES_IN_TEXT_SECTION
1372 		  || readonly_data_section == text_section)
1373 		{
1374 		  insn_lengths[uid]
1375 		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1376 		  insn_current_address += insn_lengths[uid];
1377 		  if (insn_lengths[uid] != old_length)
1378 		    something_changed = 1;
1379 		}
1380 
1381 	      continue;
1382 	    }
1383 #endif /* CASE_VECTOR_SHORTEN_MODE */
1384 
1385 	  if (! (varying_length[uid]))
1386 	    {
1387 	      if (NONJUMP_INSN_P (insn)
1388 		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
1389 		{
1390 		  int i;
1391 
1392 		  body = PATTERN (insn);
1393 		  for (i = 0; i < XVECLEN (body, 0); i++)
1394 		    {
1395 		      rtx inner_insn = XVECEXP (body, 0, i);
1396 		      int inner_uid = INSN_UID (inner_insn);
1397 
1398 		      INSN_ADDRESSES (inner_uid) = insn_current_address;
1399 
1400 		      insn_current_address += insn_lengths[inner_uid];
1401 		    }
1402 		}
1403 	      else
1404 		insn_current_address += insn_lengths[uid];
1405 
1406 	      continue;
1407 	    }
1408 
1409 	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1410 	    {
1411 	      int i;
1412 
1413 	      body = PATTERN (insn);
1414 	      new_length = 0;
1415 	      for (i = 0; i < XVECLEN (body, 0); i++)
1416 		{
1417 		  rtx inner_insn = XVECEXP (body, 0, i);
1418 		  int inner_uid = INSN_UID (inner_insn);
1419 		  int inner_length;
1420 
1421 		  INSN_ADDRESSES (inner_uid) = insn_current_address;
1422 
1423 		  /* insn_current_length returns 0 for insns with a
1424 		     non-varying length.  */
1425 		  if (! varying_length[inner_uid])
1426 		    inner_length = insn_lengths[inner_uid];
1427 		  else
1428 		    inner_length = insn_current_length (inner_insn);
1429 
1430 		  if (inner_length != insn_lengths[inner_uid])
1431 		    {
1432 		      if (!increasing || inner_length > insn_lengths[inner_uid])
1433 			{
1434 			  insn_lengths[inner_uid] = inner_length;
1435 			  something_changed = 1;
1436 			}
1437 		      else
1438 			inner_length = insn_lengths[inner_uid];
1439 		    }
1440 		  insn_current_address += inner_length;
1441 		  new_length += inner_length;
1442 		}
1443 	    }
1444 	  else
1445 	    {
1446 	      new_length = insn_current_length (insn);
1447 	      insn_current_address += new_length;
1448 	    }
1449 
1450 #ifdef ADJUST_INSN_LENGTH
1451 	  /* If needed, do any adjustment.  */
1452 	  tmp_length = new_length;
1453 	  ADJUST_INSN_LENGTH (insn, new_length);
1454 	  insn_current_address += (new_length - tmp_length);
1455 #endif
1456 
1457 	  if (new_length != insn_lengths[uid]
1458 	      && (!increasing || new_length > insn_lengths[uid]))
1459 	    {
1460 	      insn_lengths[uid] = new_length;
1461 	      something_changed = 1;
1462 	    }
1463 	  else
1464 	    insn_current_address += insn_lengths[uid] - new_length;
1465 	}
1466       /* For a non-optimizing compile, do only a single pass.  */
1467       if (!increasing)
1468 	break;
1469     }
1470 
1471   free (varying_length);
1472 }
1473 
1474 /* Given the body of an INSN known to be generated by an ASM statement, return
1475    the number of machine instructions likely to be generated for this insn.
1476    This is used to compute its length.  */
1477 
1478 static int
1479 asm_insn_count (rtx body)
1480 {
1481   const char *templ;
1482 
1483   if (GET_CODE (body) == ASM_INPUT)
1484     templ = XSTR (body, 0);
1485   else
1486     templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1487 
1488   return asm_str_count (templ);
1489 }
1490 
1491 /* Return the number of machine instructions likely to be generated for the
1492    inline-asm template. */
1493 int
1494 asm_str_count (const char *templ)
1495 {
1496   int count = 1;
1497 
1498   if (!*templ)
1499     return 0;
1500 
1501   for (; *templ; templ++)
1502     if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1503 	|| *templ == '\n')
1504       count++;
1505 
1506   return count;
1507 }
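
/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR (';'):
   asm_str_count ("") == 0, asm_str_count ("nop") == 1, and
   asm_str_count ("nop; nop\n\tnop") == 3.  */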
1508 
1509 /* ??? This is probably the wrong place for these.  */
1510 /* Structure recording the mapping from source file and directory
1511    names at compile time to those to be embedded in debug
1512    information.  */
1513 typedef struct debug_prefix_map
1514 {
1515   const char *old_prefix;
1516   const char *new_prefix;
1517   size_t old_len;
1518   size_t new_len;
1519   struct debug_prefix_map *next;
1520 } debug_prefix_map;
1521 
1522 /* Linked list of such structures.  */
1523 static debug_prefix_map *debug_prefix_maps;
1524 
1525 
1526 /* Record a debug file prefix mapping.  ARG is the argument to
1527    -fdebug-prefix-map and must be of the form OLD=NEW.  */
1528 
1529 void
1530 add_debug_prefix_map (const char *arg)
1531 {
1532   debug_prefix_map *map;
1533   const char *p;
1534 
1535   p = strchr (arg, '=');
1536   if (!p)
1537     {
1538       error ("invalid argument %qs to -fdebug-prefix-map", arg);
1539       return;
1540     }
1541   map = XNEW (debug_prefix_map);
1542   map->old_prefix = xstrndup (arg, p - arg);
1543   map->old_len = p - arg;
1544   p++;
1545   map->new_prefix = xstrdup (p);
1546   map->new_len = strlen (p);
1547   map->next = debug_prefix_maps;
1548   debug_prefix_maps = map;
1549 }
1550 
1551 /* Perform user-specified mapping of debug filename prefixes.  Return
1552    the new name corresponding to FILENAME.  */
1553 
1554 const char *
1555 remap_debug_filename (const char *filename)
1556 {
1557   debug_prefix_map *map;
1558   char *s;
1559   const char *name;
1560   size_t name_len;
1561 
1562   for (map = debug_prefix_maps; map; map = map->next)
1563     if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1564       break;
1565   if (!map)
1566     return filename;
1567   name = filename + map->old_len;
1568   name_len = strlen (name) + 1;
1569   s = (char *) alloca (name_len + map->new_len);
1570   memcpy (s, map->new_prefix, map->new_len);
1571   memcpy (s + map->new_len, name, name_len);
1572   return ggc_strdup (s);
1573 }
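
/* For example (hypothetical paths): after -fdebug-prefix-map=/home/alice/src=/usr/src,
   remap_debug_filename ("/home/alice/src/foo.c") returns "/usr/src/foo.c",
   while filenames that do not start with a recorded old prefix are returned
   unchanged.  */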
1574 
1575 /* Return true if DWARF2 debug info can be emitted for DECL.  */
1576 
1577 static bool
1578 dwarf2_debug_info_emitted_p (tree decl)
1579 {
1580   if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1581     return false;
1582 
1583   if (DECL_IGNORED_P (decl))
1584     return false;
1585 
1586   return true;
1587 }
1588 
1589 /* Return scope resulting from combination of S1 and S2.  */
1590 static tree
1591 choose_inner_scope (tree s1, tree s2)
1592 {
1593    if (!s1)
1594      return s2;
1595    if (!s2)
1596      return s1;
1597    if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1598      return s1;
1599    return s2;
1600 }
1601 
1602 /* Emit lexical block notes needed to change scope from S1 to S2.  */
1603 
1604 static void
1605 change_scope (rtx orig_insn, tree s1, tree s2)
1606 {
1607   rtx insn = orig_insn;
1608   tree com = NULL_TREE;
1609   tree ts1 = s1, ts2 = s2;
1610   tree s;
1611 
1612   while (ts1 != ts2)
1613     {
1614       gcc_assert (ts1 && ts2);
1615       if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1616 	ts1 = BLOCK_SUPERCONTEXT (ts1);
1617       else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1618 	ts2 = BLOCK_SUPERCONTEXT (ts2);
1619       else
1620 	{
1621 	  ts1 = BLOCK_SUPERCONTEXT (ts1);
1622 	  ts2 = BLOCK_SUPERCONTEXT (ts2);
1623 	}
1624     }
1625   com = ts1;
1626 
1627   /* Close scopes.  */
1628   s = s1;
1629   while (s != com)
1630     {
1631       rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1632       NOTE_BLOCK (note) = s;
1633       s = BLOCK_SUPERCONTEXT (s);
1634     }
1635 
1636   /* Open scopes.  */
1637   s = s2;
1638   while (s != com)
1639     {
1640       insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1641       NOTE_BLOCK (insn) = s;
1642       s = BLOCK_SUPERCONTEXT (s);
1643     }
1644 }
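
/* For example, if at ORIG_INSN the scope changes from a block B1 to a sibling
   block B2, both nested directly in B0, then B0 is the common ancestor: we
   emit a NOTE_INSN_BLOCK_END note for B1 and a NOTE_INSN_BLOCK_BEG note for
   B2 before ORIG_INSN, leaving B0 open.  */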
1645 
1646 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1647    on the scope tree and the newly reordered instructions.  */
1648 
1649 static void
1650 reemit_insn_block_notes (void)
1651 {
1652   tree cur_block = DECL_INITIAL (cfun->decl);
1653   rtx insn, note;
1654 
1655   insn = get_insns ();
1656   for (; insn; insn = NEXT_INSN (insn))
1657     {
1658       tree this_block;
1659 
1660       /* Prevent lexical blocks from straddling section boundaries.  */
1661       if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1662         {
1663           for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1664                s = BLOCK_SUPERCONTEXT (s))
1665             {
1666               rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1667               NOTE_BLOCK (note) = s;
1668               note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1669               NOTE_BLOCK (note) = s;
1670             }
1671         }
1672 
1673       if (!active_insn_p (insn))
1674         continue;
1675 
1676       /* Avoid putting scope notes between a jump table and its label.  */
1677       if (JUMP_TABLE_DATA_P (insn))
1678 	continue;
1679 
1680       this_block = insn_scope (insn);
1681       /* For sequences compute scope resulting from merging all scopes
1682 	 of instructions nested inside.  */
1683       if (GET_CODE (PATTERN (insn)) == SEQUENCE)
1684 	{
1685 	  int i;
1686 	  rtx body = PATTERN (insn);
1687 
1688 	  this_block = NULL;
1689 	  for (i = 0; i < XVECLEN (body, 0); i++)
1690 	    this_block = choose_inner_scope (this_block,
1691 					     insn_scope (XVECEXP (body, 0, i)));
1692 	}
1693       if (! this_block)
1694 	{
1695 	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1696 	    continue;
1697 	  else
1698 	    this_block = DECL_INITIAL (cfun->decl);
1699 	}
1700 
1701       if (this_block != cur_block)
1702 	{
1703 	  change_scope (insn, cur_block, this_block);
1704 	  cur_block = this_block;
1705 	}
1706     }
1707 
1708   /* change_scope emits before the insn, not after.  */
1709   note = emit_note (NOTE_INSN_DELETED);
1710   change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1711   delete_insn (note);
1712 
1713   reorder_blocks ();
1714 }
1715 
1716 /* Output assembler code for the start of a function,
1717    and initialize some of the variables in this file
1718    for the new function.  The label for the function and associated
1719    assembler pseudo-ops have already been output in `assemble_start_function'.
1720 
1721    FIRST is the first insn of the rtl for the function being compiled.
1722    FILE is the file to write assembler code to.
1723    OPTIMIZE_P is nonzero if we should eliminate redundant
1724      test and compare insns.  */
1725 
1726 void
1727 final_start_function (rtx first, FILE *file,
1728 		      int optimize_p ATTRIBUTE_UNUSED)
1729 {
1730   block_depth = 0;
1731 
1732   this_is_asm_operands = 0;
1733 
1734   need_profile_function = false;
1735 
1736   last_filename = LOCATION_FILE (prologue_location);
1737   last_linenum = LOCATION_LINE (prologue_location);
1738   last_discriminator = discriminator = 0;
1739 
1740   high_block_linenum = high_function_linenum = last_linenum;
1741 
1742   if (flag_sanitize & SANITIZE_ADDRESS)
1743     asan_function_start ();
1744 
1745   if (!DECL_IGNORED_P (current_function_decl))
1746     debug_hooks->begin_prologue (last_linenum, last_filename);
1747 
1748   if (!dwarf2_debug_info_emitted_p (current_function_decl))
1749     dwarf2out_begin_prologue (0, NULL);
1750 
1751 #ifdef LEAF_REG_REMAP
1752   if (crtl->uses_only_leaf_regs)
1753     leaf_renumber_regs (first);
1754 #endif
1755 
1756   /* The Sun386i and perhaps other machines don't work right
1757      if the profiling code comes after the prologue.  */
1758   if (targetm.profile_before_prologue () && crtl->profile)
1759     {
1760       if (targetm.asm_out.function_prologue
1761 	  == default_function_pro_epilogue
1762 #ifdef HAVE_prologue
1763 	  && HAVE_prologue
1764 #endif
1765 	 )
1766 	{
1767 	  rtx insn;
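	  /* Look at what precedes the first real insn: if only harmless
	     notes appear before the first basic block, postpone the
	     profiling code until NOTE_INSN_BASIC_BLOCK or
	     NOTE_INSN_FUNCTION_BEG is reached; otherwise emit it now.  */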
1768 	  for (insn = first; insn; insn = NEXT_INSN (insn))
1769 	    if (!NOTE_P (insn))
1770 	      {
1771 		insn = NULL_RTX;
1772 		break;
1773 	      }
1774 	    else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1775 		     || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1776 	      break;
1777 	    else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1778 		     || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1779 	      continue;
1780 	    else
1781 	      {
1782 		insn = NULL_RTX;
1783 		break;
1784 	      }
1785 
1786 	  if (insn)
1787 	    need_profile_function = true;
1788 	  else
1789 	    profile_function (file);
1790 	}
1791       else
1792 	profile_function (file);
1793     }
1794 
1795   /* If debugging, assign block numbers to all of the blocks in this
1796      function.  */
1797   if (write_symbols)
1798     {
1799       reemit_insn_block_notes ();
1800       number_blocks (current_function_decl);
1801       /* We never actually put out begin/end notes for the top-level
1802 	 block in the function.  But, conceptually, that block is
1803 	 always needed.  */
1804       TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1805     }
1806 
1807   if (warn_frame_larger_than
1808       && get_frame_size () > frame_larger_than_size)
1809     {
1810       /* Issue a warning.  */
1811       warning (OPT_Wframe_larger_than_,
1812                "the frame size of %wd bytes is larger than %wd bytes",
1813                get_frame_size (), frame_larger_than_size);
1814     }
1815 
1816   /* First output the function prologue: code to set up the stack frame.  */
1817   targetm.asm_out.function_prologue (file, get_frame_size ());
1818 
1819   /* If the machine represents the prologue as RTL, the profiling code must
1820      be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
1821 #ifdef HAVE_prologue
1822   if (! HAVE_prologue)
1823 #endif
1824     profile_after_prologue (file);
1825 }
1826 
1827 static void
1828 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1829 {
1830   if (!targetm.profile_before_prologue () && crtl->profile)
1831     profile_function (file);
1832 }
1833 
1834 static void
1835 profile_function (FILE *file ATTRIBUTE_UNUSED)
1836 {
1837 #ifndef NO_PROFILE_COUNTERS
1838 # define NO_PROFILE_COUNTERS	0
1839 #endif
1840 #ifdef ASM_OUTPUT_REG_PUSH
1841   rtx sval = NULL, chain = NULL;
1842 
1843   if (cfun->returns_struct)
1844     sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1845 					   true);
1846   if (cfun->static_chain_decl)
1847     chain = targetm.calls.static_chain (current_function_decl, true);
1848 #endif /* ASM_OUTPUT_REG_PUSH */
1849 
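  /* If the target uses profile counters, emit an aligned, zero-initialized
     counter slot (LONG_TYPE_SIZE bits) in the data section under the LPn
     local label for FUNCTION_PROFILER to refer to.  */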
1850   if (! NO_PROFILE_COUNTERS)
1851     {
1852       int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1853       switch_to_section (data_section);
1854       ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1855       targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1856       assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1857     }
1858 
1859   switch_to_section (current_function_section ());
1860 
1861 #ifdef ASM_OUTPUT_REG_PUSH
1862   if (sval && REG_P (sval))
1863     ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1864   if (chain && REG_P (chain))
1865     ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1866 #endif
1867 
1868   FUNCTION_PROFILER (file, current_function_funcdef_no);
1869 
1870 #ifdef ASM_OUTPUT_REG_PUSH
1871   if (chain && REG_P (chain))
1872     ASM_OUTPUT_REG_POP (file, REGNO (chain));
1873   if (sval && REG_P (sval))
1874     ASM_OUTPUT_REG_POP (file, REGNO (sval));
1875 #endif
1876 }
1877 
1878 /* Output assembler code for the end of a function.
1879    For clarity, args are same as those of `final_start_function'
1880    even though not all of them are needed.  */
1881 
1882 void
1883 final_end_function (void)
1884 {
1885   app_disable ();
1886 
1887   if (!DECL_IGNORED_P (current_function_decl))
1888     debug_hooks->end_function (high_function_linenum);
1889 
1890   /* Finally, output the function epilogue:
1891      code to restore the stack frame and return to the caller.  */
1892   targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1893 
1894   /* And debug output.  */
1895   if (!DECL_IGNORED_P (current_function_decl))
1896     debug_hooks->end_epilogue (last_linenum, last_filename);
1897 
1898   if (!dwarf2_debug_info_emitted_p (current_function_decl)
1899       && dwarf2out_do_frame ())
1900     dwarf2out_end_epilogue (last_linenum, last_filename);
1901 }
1902 
1903 
1904 /* Dumper helper for basic block information. FILE is the assembly
1905    output file, and INSN is the instruction being emitted.  */
1906 
1907 static void
1908 dump_basic_block_info (FILE *file, rtx insn, basic_block *start_to_bb,
1909                        basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1910 {
1911   basic_block bb;
1912 
1913   if (!flag_debug_asm)
1914     return;
1915 
1916   if (INSN_UID (insn) < bb_map_size
1917       && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1918     {
1919       edge e;
1920       edge_iterator ei;
1921 
1922       fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1923       if (bb->frequency)
1924         fprintf (file, " freq:%d", bb->frequency);
1925       if (bb->count)
1926         fprintf (file, " count:" HOST_WIDEST_INT_PRINT_DEC,
1927                  bb->count);
1928       fprintf (file, " seq:%d", (*bb_seqn)++);
1929       fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1930       FOR_EACH_EDGE (e, ei, bb->preds)
1931         {
1932           dump_edge_info (file, e, TDF_DETAILS, 0);
1933         }
1934       fprintf (file, "\n");
1935     }
1936   if (INSN_UID (insn) < bb_map_size
1937       && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1938     {
1939       edge e;
1940       edge_iterator ei;
1941 
1942       fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1943       FOR_EACH_EDGE (e, ei, bb->succs)
1944        {
1945          dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1946        }
1947       fprintf (file, "\n");
1948     }
1949 }
1950 
1951 /* Output assembler code for some insns: all or part of a function.
1952    For description of args, see `final_start_function', above.  */
1953 
1954 void
1955 final (rtx first, FILE *file, int optimize_p)
1956 {
1957   rtx insn, next;
1958   int seen = 0;
1959 
1960   /* Used for -dA dump.  */
1961   basic_block *start_to_bb = NULL;
1962   basic_block *end_to_bb = NULL;
1963   int bb_map_size = 0;
1964   int bb_seqn = 0;
1965 
1966   last_ignored_compare = 0;
1967 
1968 #ifdef HAVE_cc0
1969   for (insn = first; insn; insn = NEXT_INSN (insn))
1970     {
1971       /* If CC tracking across branches is enabled, record the insn which
1972 	 jumps to each branch only reached from one place.  */
1973       if (optimize_p && JUMP_P (insn))
1974 	{
1975 	  rtx lab = JUMP_LABEL (insn);
1976 	  if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
1977 	    {
1978 	      LABEL_REFS (lab) = insn;
1979 	    }
1980 	}
1981     }
1982 #endif
1983 
1984   init_recog ();
1985 
1986   CC_STATUS_INIT;
1987 
1988   if (flag_debug_asm)
1989     {
1990       basic_block bb;
1991 
1992       bb_map_size = get_max_uid () + 1;
1993       start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1994       end_to_bb = XCNEWVEC (basic_block, bb_map_size);
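      /* Map the first and last insn of each basic block to that block so
	 dump_basic_block_info can annotate block boundaries as the insns
	 are output.  */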
1995 
1996       /* There is no cfg for a thunk.  */
1997       if (!cfun->is_thunk)
1998 	FOR_EACH_BB_REVERSE_FN (bb, cfun)
1999 	  {
2000 	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
2001 	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
2002 	  }
2003     }
2004 
2005   /* Output the insns.  */
2006   for (insn = first; insn;)
2007     {
2008       if (HAVE_ATTR_length)
2009 	{
2010 	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2011 	    {
2012 	      /* This can be triggered by bugs elsewhere in the compiler if
2013 		 new insns are created after init_insn_lengths is called.  */
2014 	      gcc_assert (NOTE_P (insn));
2015 	      insn_current_address = -1;
2016 	    }
2017 	  else
2018 	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2019 	}
2020 
2021       dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2022                              bb_map_size, &bb_seqn);
2023       insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2024     }
2025 
2026   if (flag_debug_asm)
2027     {
2028       free (start_to_bb);
2029       free (end_to_bb);
2030     }
2031 
2032   /* Remove CFI notes, to avoid compare-debug failures.  */
2033   for (insn = first; insn; insn = next)
2034     {
2035       next = NEXT_INSN (insn);
2036       if (NOTE_P (insn)
2037 	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
2038 	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2039 	delete_insn (insn);
2040     }
2041 }
2042 
2043 const char *
2044 get_insn_template (int code, rtx insn)
2045 {
2046   switch (insn_data[code].output_format)
2047     {
2048     case INSN_OUTPUT_FORMAT_SINGLE:
2049       return insn_data[code].output.single;
2050     case INSN_OUTPUT_FORMAT_MULTI:
2051       return insn_data[code].output.multi[which_alternative];
2052     case INSN_OUTPUT_FORMAT_FUNCTION:
2053       gcc_assert (insn);
2054       return (*insn_data[code].output.function) (recog_data.operand, insn);
2055 
2056     default:
2057       gcc_unreachable ();
2058     }
2059 }
2060 
2061 /* Emit the appropriate declaration for an alternate-entry-point
2062    symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
2063    LABEL_KIND != LABEL_NORMAL.
2064 
2065    The case fall-through in this function is intentional.  */
2066 static void
2067 output_alternate_entry_point (FILE *file, rtx insn)
2068 {
2069   const char *name = LABEL_NAME (insn);
2070 
2071   switch (LABEL_KIND (insn))
2072     {
2073     case LABEL_WEAK_ENTRY:
2074 #ifdef ASM_WEAKEN_LABEL
2075       ASM_WEAKEN_LABEL (file, name);
2076 #endif
2077     case LABEL_GLOBAL_ENTRY:
2078       targetm.asm_out.globalize_label (file, name);
2079     case LABEL_STATIC_ENTRY:
2080 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2081       ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2082 #endif
2083       ASM_OUTPUT_LABEL (file, name);
2084       break;
2085 
2086     case LABEL_NORMAL:
2087     default:
2088       gcc_unreachable ();
2089     }
2090 }
2091 
2092 /* Given a CALL_INSN, find and return the nested CALL.  */
2093 static rtx
2094 call_from_call_insn (rtx insn)
2095 {
2096   rtx x;
2097   gcc_assert (CALL_P (insn));
2098   x = PATTERN (insn);
2099 
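  /* The CALL rtx may be wrapped: a value-returning call sits inside a SET,
     and the whole thing may further be nested in a PARALLEL or COND_EXEC.
     Strip those wrappers until the CALL itself is found.  */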
2100   while (GET_CODE (x) != CALL)
2101     {
2102       switch (GET_CODE (x))
2103 	{
2104 	default:
2105 	  gcc_unreachable ();
2106 	case COND_EXEC:
2107 	  x = COND_EXEC_CODE (x);
2108 	  break;
2109 	case PARALLEL:
2110 	  x = XVECEXP (x, 0, 0);
2111 	  break;
2112 	case SET:
2113 	  x = XEXP (x, 1);
2114 	  break;
2115 	}
2116     }
2117   return x;
2118 }
2119 
2120 /* The final scan for one insn, INSN.
2121    Args are same as in `final', except that INSN
2122    is the insn being scanned.
2123    Value returned is the next insn to be scanned.
2124 
2125    NOPEEPHOLES is the flag to disallow peephole processing (currently
2126    used within delayed branch sequence output).
2127 
2128    SEEN is used to track the end of the prologue, for emitting
2129    debug information.  We force the emission of a line note after
2130    both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG.  */
2131 
2132 rtx
2133 final_scan_insn (rtx insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2134 		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2135 {
2136 #ifdef HAVE_cc0
2137   rtx set;
2138 #endif
2139   rtx next;
2140 
2141   insn_counter++;
2142 
2143   /* Ignore deleted insns.  These can occur when we split insns (due to a
2144      template of "#") while not optimizing.  */
2145   if (INSN_DELETED_P (insn))
2146     return NEXT_INSN (insn);
2147 
2148   switch (GET_CODE (insn))
2149     {
2150     case NOTE:
2151       switch (NOTE_KIND (insn))
2152 	{
2153 	case NOTE_INSN_DELETED:
2154 	  break;
2155 
2156 	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2157 	  in_cold_section_p = !in_cold_section_p;
2158 
2159 	  if (dwarf2out_do_frame ())
2160 	    dwarf2out_switch_text_section ();
2161 	  else if (!DECL_IGNORED_P (current_function_decl))
2162 	    debug_hooks->switch_text_section ();
2163 
2164 	  switch_to_section (current_function_section ());
2165 	  targetm.asm_out.function_switched_text_sections (asm_out_file,
2166 							   current_function_decl,
2167 							   in_cold_section_p);
2168 	  /* Emit a label for the split cold section.  Form label name by
2169 	     suffixing "cold" to the original function's name.  */
2170 	  if (in_cold_section_p)
2171 	    {
2172 	      tree cold_function_name
2173 		= clone_function_name (current_function_decl, "cold");
2174 	      ASM_OUTPUT_LABEL (asm_out_file,
2175 				IDENTIFIER_POINTER (cold_function_name));
2176 	    }
2177 	  break;
2178 
2179 	case NOTE_INSN_BASIC_BLOCK:
2180 	  if (need_profile_function)
2181 	    {
2182 	      profile_function (asm_out_file);
2183 	      need_profile_function = false;
2184 	    }
2185 
2186 	  if (targetm.asm_out.unwind_emit)
2187 	    targetm.asm_out.unwind_emit (asm_out_file, insn);
2188 
2189           discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2190 
2191 	  break;
2192 
2193 	case NOTE_INSN_EH_REGION_BEG:
2194 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2195 				  NOTE_EH_HANDLER (insn));
2196 	  break;
2197 
2198 	case NOTE_INSN_EH_REGION_END:
2199 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2200 				  NOTE_EH_HANDLER (insn));
2201 	  break;
2202 
2203 	case NOTE_INSN_PROLOGUE_END:
2204 	  targetm.asm_out.function_end_prologue (file);
2205 	  profile_after_prologue (file);
2206 
2207 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2208 	    {
2209 	      *seen |= SEEN_EMITTED;
2210 	      force_source_line = true;
2211 	    }
2212 	  else
2213 	    *seen |= SEEN_NOTE;
2214 
2215 	  break;
2216 
2217 	case NOTE_INSN_EPILOGUE_BEG:
2218           if (!DECL_IGNORED_P (current_function_decl))
2219             (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2220 	  targetm.asm_out.function_begin_epilogue (file);
2221 	  break;
2222 
2223 	case NOTE_INSN_CFI:
2224 	  dwarf2out_emit_cfi (NOTE_CFI (insn));
2225 	  break;
2226 
2227 	case NOTE_INSN_CFI_LABEL:
2228 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2229 				  NOTE_LABEL_NUMBER (insn));
2230 	  break;
2231 
2232 	case NOTE_INSN_FUNCTION_BEG:
2233 	  if (need_profile_function)
2234 	    {
2235 	      profile_function (asm_out_file);
2236 	      need_profile_function = false;
2237 	    }
2238 
2239 	  app_disable ();
2240 	  if (!DECL_IGNORED_P (current_function_decl))
2241 	    debug_hooks->end_prologue (last_linenum, last_filename);
2242 
2243 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2244 	    {
2245 	      *seen |= SEEN_EMITTED;
2246 	      force_source_line = true;
2247 	    }
2248 	  else
2249 	    *seen |= SEEN_NOTE;
2250 
2251 	  break;
2252 
2253 	case NOTE_INSN_BLOCK_BEG:
2254 	  if (debug_info_level == DINFO_LEVEL_NORMAL
2255 	      || debug_info_level == DINFO_LEVEL_VERBOSE
2256 	      || write_symbols == DWARF2_DEBUG
2257 	      || write_symbols == VMS_AND_DWARF2_DEBUG
2258 	      || write_symbols == VMS_DEBUG)
2259 	    {
2260 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2261 
2262 	      app_disable ();
2263 	      ++block_depth;
2264 	      high_block_linenum = last_linenum;
2265 
2266 	      /* Output debugging info about the symbol-block beginning.  */
2267 	      if (!DECL_IGNORED_P (current_function_decl))
2268 		debug_hooks->begin_block (last_linenum, n);
2269 
2270 	      /* Mark this block as output.  */
2271 	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2272 	    }
2273 	  if (write_symbols == DBX_DEBUG
2274 	      || write_symbols == SDB_DEBUG)
2275 	    {
2276 	      location_t *locus_ptr
2277 		= block_nonartificial_location (NOTE_BLOCK (insn));
2278 
2279 	      if (locus_ptr != NULL)
2280 		{
2281 		  override_filename = LOCATION_FILE (*locus_ptr);
2282 		  override_linenum = LOCATION_LINE (*locus_ptr);
2283 		}
2284 	    }
2285 	  break;
2286 
2287 	case NOTE_INSN_BLOCK_END:
2288 	  if (debug_info_level == DINFO_LEVEL_NORMAL
2289 	      || debug_info_level == DINFO_LEVEL_VERBOSE
2290 	      || write_symbols == DWARF2_DEBUG
2291 	      || write_symbols == VMS_AND_DWARF2_DEBUG
2292 	      || write_symbols == VMS_DEBUG)
2293 	    {
2294 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2295 
2296 	      app_disable ();
2297 
2298 	      /* End of a symbol-block.  */
2299 	      --block_depth;
2300 	      gcc_assert (block_depth >= 0);
2301 
2302 	      if (!DECL_IGNORED_P (current_function_decl))
2303 		debug_hooks->end_block (high_block_linenum, n);
2304 	    }
2305 	  if (write_symbols == DBX_DEBUG
2306 	      || write_symbols == SDB_DEBUG)
2307 	    {
2308 	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2309 	      location_t *locus_ptr
2310 		= block_nonartificial_location (outer_block);
2311 
2312 	      if (locus_ptr != NULL)
2313 		{
2314 		  override_filename = LOCATION_FILE (*locus_ptr);
2315 		  override_linenum = LOCATION_LINE (*locus_ptr);
2316 		}
2317 	      else
2318 		{
2319 		  override_filename = NULL;
2320 		  override_linenum = 0;
2321 		}
2322 	    }
2323 	  break;
2324 
2325 	case NOTE_INSN_DELETED_LABEL:
2326 	  /* Emit the label.  We may have deleted the CODE_LABEL because
2327 	     the label could be proved to be unreachable, though still
2328 	     referenced (in the form of having its address taken).  */
2329 	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2330 	  break;
2331 
2332 	case NOTE_INSN_DELETED_DEBUG_LABEL:
2333 	  /* Similarly, but need to use different namespace for it.  */
2334 	  if (CODE_LABEL_NUMBER (insn) != -1)
2335 	    ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2336 	  break;
2337 
2338 	case NOTE_INSN_VAR_LOCATION:
2339 	case NOTE_INSN_CALL_ARG_LOCATION:
2340 	  if (!DECL_IGNORED_P (current_function_decl))
2341 	    debug_hooks->var_location (insn);
2342 	  break;
2343 
2344 	default:
2345 	  gcc_unreachable ();
2346 	  break;
2347 	}
2348       break;
2349 
2350     case BARRIER:
2351       break;
2352 
2353     case CODE_LABEL:
2354       /* The target port might emit labels in the output function for
2355 	 some insn, e.g. sh.c output_branchy_insn.  */
2356       if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2357 	{
2358 	  int align = LABEL_TO_ALIGNMENT (insn);
2359 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2360 	  int max_skip = LABEL_TO_MAX_SKIP (insn);
2361 #endif
2362 
2363 	  if (align && NEXT_INSN (insn))
2364 	    {
2365 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2366 	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2367 #else
2368 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2369               ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2370 #else
2371 	      ASM_OUTPUT_ALIGN (file, align);
2372 #endif
2373 #endif
2374 	    }
2375 	}
2376       CC_STATUS_INIT;
2377 
2378       if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2379 	debug_hooks->label (insn);
2380 
2381       app_disable ();
2382 
2383       next = next_nonnote_insn (insn);
2384       /* If this label is followed by a jump-table, make sure we put
2385 	 the label in the read-only section.  Also possibly write the
2386 	 label and jump table together.  */
2387       if (next != 0 && JUMP_TABLE_DATA_P (next))
2388 	{
2389 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2390 	  /* In this case, the case vector is being moved by the
2391 	     target, so don't output the label at all.  Leave that
2392 	     to the back end macros.  */
2393 #else
2394 	  if (! JUMP_TABLES_IN_TEXT_SECTION)
2395 	    {
2396 	      int log_align;
2397 
2398 	      switch_to_section (targetm.asm_out.function_rodata_section
2399 				 (current_function_decl));
2400 
2401 #ifdef ADDR_VEC_ALIGN
2402 	      log_align = ADDR_VEC_ALIGN (next);
2403 #else
2404 	      log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2405 #endif
2406 	      ASM_OUTPUT_ALIGN (file, log_align);
2407 	    }
2408 	  else
2409 	    switch_to_section (current_function_section ());
2410 
2411 #ifdef ASM_OUTPUT_CASE_LABEL
2412 	  ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2413 				 next);
2414 #else
2415 	  targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2416 #endif
2417 #endif
2418 	  break;
2419 	}
2420       if (LABEL_ALT_ENTRY_P (insn))
2421 	output_alternate_entry_point (file, insn);
2422       else
2423 	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2424       break;
2425 
2426     default:
2427       {
2428 	rtx body = PATTERN (insn);
2429 	int insn_code_number;
2430 	const char *templ;
2431 	bool is_stmt;
2432 
2433 	/* Reset this early so it is correct for ASM statements.  */
2434 	current_insn_predicate = NULL_RTX;
2435 
2436 	/* An INSN, JUMP_INSN or CALL_INSN.
2437 	   First check for special kinds that recog doesn't recognize.  */
2438 
2439 	if (GET_CODE (body) == USE /* These are just declarations.  */
2440 	    || GET_CODE (body) == CLOBBER)
2441 	  break;
2442 
2443 #ifdef HAVE_cc0
2444 	{
2445 	  /* If there is a REG_CC_SETTER note on this insn, it means that
2446 	     the setting of the condition code was done in the delay slot
2447 	     of the insn that branched here.  So recover the cc status
2448 	     from the insn that set it.  */
2449 
2450 	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2451 	  if (note)
2452 	    {
2453 	      NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2454 	      cc_prev_status = cc_status;
2455 	    }
2456 	}
2457 #endif
2458 
2459 	/* Detect insns that are really jump-tables
2460 	   and output them as such.  */
2461 
2462         if (JUMP_TABLE_DATA_P (insn))
2463 	  {
2464 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2465 	    int vlen, idx;
2466 #endif
2467 
2468 	    if (! JUMP_TABLES_IN_TEXT_SECTION)
2469 	      switch_to_section (targetm.asm_out.function_rodata_section
2470 				 (current_function_decl));
2471 	    else
2472 	      switch_to_section (current_function_section ());
2473 
2474 	    app_disable ();
2475 
2476 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2477 	    if (GET_CODE (body) == ADDR_VEC)
2478 	      {
2479 #ifdef ASM_OUTPUT_ADDR_VEC
2480 		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2481 #else
2482 		gcc_unreachable ();
2483 #endif
2484 	      }
2485 	    else
2486 	      {
2487 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2488 		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2489 #else
2490 		gcc_unreachable ();
2491 #endif
2492 	      }
2493 #else
2494 	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2495 	    for (idx = 0; idx < vlen; idx++)
2496 	      {
2497 		if (GET_CODE (body) == ADDR_VEC)
2498 		  {
2499 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2500 		    ASM_OUTPUT_ADDR_VEC_ELT
2501 		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2502 #else
2503 		    gcc_unreachable ();
2504 #endif
2505 		  }
2506 		else
2507 		  {
2508 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2509 		    ASM_OUTPUT_ADDR_DIFF_ELT
2510 		      (file,
2511 		       body,
2512 		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2513 		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2514 #else
2515 		    gcc_unreachable ();
2516 #endif
2517 		  }
2518 	      }
2519 #ifdef ASM_OUTPUT_CASE_END
2520 	    ASM_OUTPUT_CASE_END (file,
2521 				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2522 				 insn);
2523 #endif
2524 #endif
2525 
2526 	    switch_to_section (current_function_section ());
2527 
2528 	    break;
2529 	  }
2530 	/* Output this line note if it is the first or the last line
2531 	   note in a row.  */
2532 	if (!DECL_IGNORED_P (current_function_decl)
2533 	    && notice_source_line (insn, &is_stmt))
2534 	  (*debug_hooks->source_line) (last_linenum, last_filename,
2535 				       last_discriminator, is_stmt);
2536 
2537 	if (GET_CODE (body) == ASM_INPUT)
2538 	  {
2539 	    const char *string = XSTR (body, 0);
2540 
2541 	    /* There's no telling what that did to the condition codes.  */
2542 	    CC_STATUS_INIT;
2543 
2544 	    if (string[0])
2545 	      {
2546 		expanded_location loc;
2547 
2548 		app_enable ();
2549 		loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2550 		if (*loc.file && loc.line)
2551 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2552 			   ASM_COMMENT_START, loc.line, loc.file);
2553 		fprintf (asm_out_file, "\t%s\n", string);
2554 #if HAVE_AS_LINE_ZERO
2555 		if (*loc.file && loc.line)
2556 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2557 #endif
2558 	      }
2559 	    break;
2560 	  }
2561 
2562 	/* Detect `asm' construct with operands.  */
2563 	if (asm_noperands (body) >= 0)
2564 	  {
2565 	    unsigned int noperands = asm_noperands (body);
2566 	    rtx *ops = XALLOCAVEC (rtx, noperands);
2567 	    const char *string;
2568 	    location_t loc;
2569 	    expanded_location expanded;
2570 
2571 	    /* There's no telling what that did to the condition codes.  */
2572 	    CC_STATUS_INIT;
2573 
2574 	    /* Get out the operand values.  */
2575 	    string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2576 	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2577 	    insn_noperands = noperands;
2578 	    this_is_asm_operands = insn;
2579 	    expanded = expand_location (loc);
2580 
2581 #ifdef FINAL_PRESCAN_INSN
2582 	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2583 #endif
2584 
2585 	    /* Output the insn using them.  */
2586 	    if (string[0])
2587 	      {
2588 		app_enable ();
2589 		if (expanded.file && expanded.line)
2590 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2591 			   ASM_COMMENT_START, expanded.line, expanded.file);
2592 	        output_asm_insn (string, ops);
2593 #if HAVE_AS_LINE_ZERO
2594 		if (expanded.file && expanded.line)
2595 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2596 #endif
2597 	      }
2598 
2599 	    if (targetm.asm_out.final_postscan_insn)
2600 	      targetm.asm_out.final_postscan_insn (file, insn, ops,
2601 						   insn_noperands);
2602 
2603 	    this_is_asm_operands = 0;
2604 	    break;
2605 	  }
2606 
2607 	app_disable ();
2608 
2609 	if (GET_CODE (body) == SEQUENCE)
2610 	  {
2611 	    /* A delayed-branch sequence.  */
2612 	    int i;
2613 
2614 	    final_sequence = body;
2615 
2616 	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2617 	       force the restoration of a comparison that was previously
2618 	       thought unnecessary.  If that happens, cancel this sequence
2619 	       and cause that insn to be restored.  */
2620 
2621 	    next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2622 	    if (next != XVECEXP (body, 0, 1))
2623 	      {
2624 		final_sequence = 0;
2625 		return next;
2626 	      }
2627 
2628 	    for (i = 1; i < XVECLEN (body, 0); i++)
2629 	      {
2630 		rtx insn = XVECEXP (body, 0, i);
2631 		rtx next = NEXT_INSN (insn);
2632 		/* We loop in case any instruction in a delay slot gets
2633 		   split.  */
2634 		do
2635 		  insn = final_scan_insn (insn, file, 0, 1, seen);
2636 		while (insn != next);
2637 	      }
2638 #ifdef DBR_OUTPUT_SEQEND
2639 	    DBR_OUTPUT_SEQEND (file);
2640 #endif
2641 	    final_sequence = 0;
2642 
2643 	    /* If the insn requiring the delay slot was a CALL_INSN, the
2644 	       insns in the delay slot are actually executed before the
2645 	       called function.  Hence we don't preserve any CC-setting
2646 	       actions in these insns and the CC must be marked as being
2647 	       clobbered by the function.  */
2648 	    if (CALL_P (XVECEXP (body, 0, 0)))
2649 	      {
2650 		CC_STATUS_INIT;
2651 	      }
2652 	    break;
2653 	  }
2654 
2655 	/* We have a real machine instruction as rtl.  */
2656 
2657 	body = PATTERN (insn);
2658 
2659 #ifdef HAVE_cc0
2660 	set = single_set (insn);
2661 
2662 	/* Check for redundant test and compare instructions
2663 	   (when the condition codes are already set up as desired).
2664 	   This is done only when optimizing; if not optimizing,
2665 	   it should be possible for the user to alter a variable
2666 	   with the debugger in between statements
2667 	   and the next statement should reexamine the variable
2668 	   to compute the condition codes.  */
2669 
2670 	if (optimize_p)
2671 	  {
2672 	    if (set
2673 		&& GET_CODE (SET_DEST (set)) == CC0
2674 		&& insn != last_ignored_compare)
2675 	      {
2676 		rtx src1, src2;
2677 		if (GET_CODE (SET_SRC (set)) == SUBREG)
2678 		  SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2679 
2680 		src1 = SET_SRC (set);
2681 		src2 = NULL_RTX;
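		/* For a comparison against zero the cc's may have been set
		   by the insn that computed the first operand, so remember
		   that operand in SRC2 and accept a match against it as
		   well.  */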
2682 		if (GET_CODE (SET_SRC (set)) == COMPARE)
2683 		  {
2684 		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2685 		      XEXP (SET_SRC (set), 0)
2686 			= alter_subreg (&XEXP (SET_SRC (set), 0), true);
2687 		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2688 		      XEXP (SET_SRC (set), 1)
2689 			= alter_subreg (&XEXP (SET_SRC (set), 1), true);
2690 		    if (XEXP (SET_SRC (set), 1)
2691 			== CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2692 		      src2 = XEXP (SET_SRC (set), 0);
2693 		  }
2694 		if ((cc_status.value1 != 0
2695 		     && rtx_equal_p (src1, cc_status.value1))
2696 		    || (cc_status.value2 != 0
2697 			&& rtx_equal_p (src1, cc_status.value2))
2698 		    || (src2 != 0 && cc_status.value1 != 0
2699 		        && rtx_equal_p (src2, cc_status.value1))
2700 		    || (src2 != 0 && cc_status.value2 != 0
2701 			&& rtx_equal_p (src2, cc_status.value2)))
2702 		  {
2703 		    /* Don't delete insn if it has an addressing side-effect.  */
2704 		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2705 			/* or if anything in it is volatile.  */
2706 			&& ! volatile_refs_p (PATTERN (insn)))
2707 		      {
2708 			/* We don't really delete the insn; just ignore it.  */
2709 			last_ignored_compare = insn;
2710 			break;
2711 		      }
2712 		  }
2713 	      }
2714 	  }
2715 
2716 	/* If this is a conditional branch, maybe modify it
2717 	   if the cc's are in a nonstandard state
2718 	   so that it accomplishes the same thing that it would
2719 	   do straightforwardly if the cc's were set up normally.  */
2720 
2721 	if (cc_status.flags != 0
2722 	    && JUMP_P (insn)
2723 	    && GET_CODE (body) == SET
2724 	    && SET_DEST (body) == pc_rtx
2725 	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2726 	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2727 	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2728 	  {
2729 	    /* This function may alter the contents of its argument
2730 	       and clear some of the cc_status.flags bits.
2731 	       It may also return 1 meaning condition now always true
2732 	       or -1 meaning condition now always false
2733 	       or 2 meaning condition nontrivial but altered.  */
2734 	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2735 	    /* If condition now has fixed value, replace the IF_THEN_ELSE
2736 	       with its then-operand or its else-operand.  */
2737 	    if (result == 1)
2738 	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
2739 	    if (result == -1)
2740 	      SET_SRC (body) = XEXP (SET_SRC (body), 2);
2741 
2742 	    /* The jump is now either unconditional or a no-op.
2743 	       If it has become a no-op, don't try to output it.
2744 	       (It would not be recognized.)  */
2745 	    if (SET_SRC (body) == pc_rtx)
2746 	      {
2747 	        delete_insn (insn);
2748 		break;
2749 	      }
2750 	    else if (ANY_RETURN_P (SET_SRC (body)))
2751 	      /* Replace (set (pc) (return)) with (return).  */
2752 	      PATTERN (insn) = body = SET_SRC (body);
2753 
2754 	    /* Rerecognize the instruction if it has changed.  */
2755 	    if (result != 0)
2756 	      INSN_CODE (insn) = -1;
2757 	  }
2758 
2759 	/* If this is a conditional trap, maybe modify it if the cc's
2760 	   are in a nonstandard state so that it accomplishes the same
2761 	   thing that it would do straightforwardly if the cc's were
2762 	   set up normally.  */
2763 	if (cc_status.flags != 0
2764 	    && NONJUMP_INSN_P (insn)
2765 	    && GET_CODE (body) == TRAP_IF
2766 	    && COMPARISON_P (TRAP_CONDITION (body))
2767 	    && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2768 	  {
2769 	    /* This function may alter the contents of its argument
2770 	       and clear some of the cc_status.flags bits.
2771 	       It may also return 1 meaning condition now always true
2772 	       or -1 meaning condition now always false
2773 	       or 2 meaning condition nontrivial but altered.  */
2774 	    int result = alter_cond (TRAP_CONDITION (body));
2775 
2776 	    /* If TRAP_CONDITION has become always false, delete the
2777 	       instruction.  */
2778 	    if (result == -1)
2779 	      {
2780 		delete_insn (insn);
2781 		break;
2782 	      }
2783 
2784 	    /* If TRAP_CONDITION has become always true, replace
2785 	       TRAP_CONDITION with const_true_rtx.  */
2786 	    if (result == 1)
2787 	      TRAP_CONDITION (body) = const_true_rtx;
2788 
2789 	    /* Rerecognize the instruction if it has changed.  */
2790 	    if (result != 0)
2791 	      INSN_CODE (insn) = -1;
2792 	  }
2793 
2794 	/* Make same adjustments to instructions that examine the
2795 	   condition codes without jumping and instructions that
2796 	   handle conditional moves (if this machine has either one).  */
2797 
2798 	if (cc_status.flags != 0
2799 	    && set != 0)
2800 	  {
2801 	    rtx cond_rtx, then_rtx, else_rtx;
2802 
2803 	    if (!JUMP_P (insn)
2804 		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2805 	      {
2806 		cond_rtx = XEXP (SET_SRC (set), 0);
2807 		then_rtx = XEXP (SET_SRC (set), 1);
2808 		else_rtx = XEXP (SET_SRC (set), 2);
2809 	      }
2810 	    else
2811 	      {
2812 		cond_rtx = SET_SRC (set);
2813 		then_rtx = const_true_rtx;
2814 		else_rtx = const0_rtx;
2815 	      }
2816 
2817 	    if (COMPARISON_P (cond_rtx)
2818 		&& XEXP (cond_rtx, 0) == cc0_rtx)
2819 	      {
2820 		int result;
2821 		result = alter_cond (cond_rtx);
2822 		if (result == 1)
2823 		  validate_change (insn, &SET_SRC (set), then_rtx, 0);
2824 		else if (result == -1)
2825 		  validate_change (insn, &SET_SRC (set), else_rtx, 0);
2826 		else if (result == 2)
2827 		  INSN_CODE (insn) = -1;
2828 		if (SET_DEST (set) == SET_SRC (set))
2829 		  delete_insn (insn);
2830 	      }
2831 	  }
2832 
2833 #endif
2834 
2835 #ifdef HAVE_peephole
2836 	/* Do machine-specific peephole optimizations if desired.  */
2837 
2838 	if (optimize_p && !flag_no_peephole && !nopeepholes)
2839 	  {
2840 	    rtx next = peephole (insn);
2841 	    /* When peepholing, if there were notes within the peephole,
2842 	       emit them before the peephole.  */
2843 	    if (next != 0 && next != NEXT_INSN (insn))
2844 	      {
2845 		rtx note, prev = PREV_INSN (insn);
2846 
2847 		for (note = NEXT_INSN (insn); note != next;
2848 		     note = NEXT_INSN (note))
2849 		  final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2850 
2851 		/* Put the notes in the proper position for a later
2852 		   rescan.  For example, the SH target can do this
2853 		   when generating a far jump in a delayed branch
2854 		   sequence.  */
2855 		note = NEXT_INSN (insn);
2856 		PREV_INSN (note) = prev;
2857 		NEXT_INSN (prev) = note;
2858 		NEXT_INSN (PREV_INSN (next)) = insn;
2859 		PREV_INSN (insn) = PREV_INSN (next);
2860 		NEXT_INSN (insn) = next;
2861 		PREV_INSN (next) = insn;
2862 	      }
2863 
2864 	    /* PEEPHOLE might have changed this.  */
2865 	    body = PATTERN (insn);
2866 	  }
2867 #endif
2868 
2869 	/* Try to recognize the instruction.
2870 	   If successful, verify that the operands satisfy the
2871 	   constraints for the instruction.  Crash if they don't,
2872 	   since `reload' should have changed them so that they do.  */
2873 
2874 	insn_code_number = recog_memoized (insn);
2875 	cleanup_subreg_operands (insn);
2876 
2877 	/* Dump the insn in the assembly for debugging (-dAP).
2878 	   If the final dump is requested as slim RTL, dump slim
2879 	   RTL to the assembly file also.  */
2880 	if (flag_dump_rtl_in_asm)
2881 	  {
2882 	    print_rtx_head = ASM_COMMENT_START;
2883 	    if (! (dump_flags & TDF_SLIM))
2884 	      print_rtl_single (asm_out_file, insn);
2885 	    else
2886 	      dump_insn_slim (asm_out_file, insn);
2887 	    print_rtx_head = "";
2888 	  }
2889 
2890 	if (! constrain_operands_cached (1))
2891 	  fatal_insn_not_found (insn);
2892 
2893 	/* Some target machines need to prescan each insn before
2894 	   it is output.  */
2895 
2896 #ifdef FINAL_PRESCAN_INSN
2897 	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2898 #endif
2899 
2900 	if (targetm.have_conditional_execution ()
2901 	    && GET_CODE (PATTERN (insn)) == COND_EXEC)
2902 	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2903 
2904 #ifdef HAVE_cc0
2905 	cc_prev_status = cc_status;
2906 
2907 	/* Update `cc_status' for this instruction.
2908 	   The instruction's output routine may change it further.
2909 	   If the output routine for a jump insn needs to depend
2910 	   on the cc status, it should look at cc_prev_status.  */
2911 
2912 	NOTICE_UPDATE_CC (body, insn);
2913 #endif
2914 
2915 	current_output_insn = debug_insn = insn;
2916 
2917 	/* Find the proper template for this insn.  */
2918 	templ = get_insn_template (insn_code_number, insn);
2919 
2920 	/* If the C code returns 0, it means that it is a jump insn
2921 	   which follows a deleted test insn, and that test insn
2922 	   needs to be reinserted.  */
2923 	if (templ == 0)
2924 	  {
2925 	    rtx prev;
2926 
2927 	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2928 
2929 	    /* We have already processed the notes between the setter and
2930 	       the user.  Make sure we don't process them again, this is
2931 	       particularly important if one of the notes is a block
2932 	       scope note or an EH note.  */
2933 	    for (prev = insn;
2934 		 prev != last_ignored_compare;
2935 		 prev = PREV_INSN (prev))
2936 	      {
2937 		if (NOTE_P (prev))
2938 		  delete_insn (prev);	/* Use delete_note.  */
2939 	      }
2940 
2941 	    return prev;
2942 	  }
2943 
2944 	/* If the template is the string "#", it means that this insn must
2945 	   be split.  */
2946 	if (templ[0] == '#' && templ[1] == '\0')
2947 	  {
2948 	    rtx new_rtx = try_split (body, insn, 0);
2949 
2950 	    /* If we didn't split the insn, go away.  */
2951 	    if (new_rtx == insn && PATTERN (new_rtx) == body)
2952 	      fatal_insn ("could not split insn", insn);
2953 
2954 	    /* If we have a length attribute, this instruction should have
2955 	       been split in shorten_branches, to ensure that we would have
2956 	       valid length info for the splitees.  */
2957 	    gcc_assert (!HAVE_ATTR_length);
2958 
2959 	    return new_rtx;
2960 	  }
2961 
2962 	/* ??? This will put the directives in the wrong place if
2963 	   get_insn_template outputs assembly directly.  However, calling it
2964 	   before get_insn_template breaks if the insn is split.  */
2965 	if (targetm.asm_out.unwind_emit_before_insn
2966 	    && targetm.asm_out.unwind_emit)
2967 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
2968 
2969 	if (CALL_P (insn))
2970 	  {
2971 	    rtx x = call_from_call_insn (insn);
2972 	    x = XEXP (x, 0);
2973 	    if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2974 	      {
2975 		tree t;
2976 		x = XEXP (x, 0);
2977 		t = SYMBOL_REF_DECL (x);
2978 		if (t)
2979 		  assemble_external (t);
2980 	      }
2981 	    if (!DECL_IGNORED_P (current_function_decl))
2982 	      debug_hooks->var_location (insn);
2983 	  }
2984 
2985 	/* Output assembler code from the template.  */
2986 	output_asm_insn (templ, recog_data.operand);
2987 
2988 	/* Some target machines need to postscan each insn after
2989 	   it is output.  */
2990 	if (targetm.asm_out.final_postscan_insn)
2991 	  targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2992 					       recog_data.n_operands);
2993 
2994 	if (!targetm.asm_out.unwind_emit_before_insn
2995 	    && targetm.asm_out.unwind_emit)
2996 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
2997 
2998 	current_output_insn = debug_insn = 0;
2999       }
3000     }
3001   return NEXT_INSN (insn);
3002 }
3003 
3004 /* Return whether a source line note needs to be emitted before INSN.
3005    Sets IS_STMT to TRUE if the line should be marked as a possible
3006    breakpoint location.  */
3007 
3008 static bool
3009 notice_source_line (rtx insn, bool *is_stmt)
3010 {
3011   const char *filename;
3012   int linenum;
3013 
3014   if (override_filename)
3015     {
3016       filename = override_filename;
3017       linenum = override_linenum;
3018     }
3019   else
3020     {
3021       filename = insn_file (insn);
3022       linenum = insn_line (insn);
3023     }
3024 
3025   if (filename == NULL)
3026     return false;
3027 
3028   if (force_source_line
3029       || filename != last_filename
3030       || last_linenum != linenum)
3031     {
3032       force_source_line = false;
3033       last_filename = filename;
3034       last_linenum = linenum;
3035       last_discriminator = discriminator;
3036       *is_stmt = true;
3037       high_block_linenum = MAX (last_linenum, high_block_linenum);
3038       high_function_linenum = MAX (last_linenum, high_function_linenum);
3039       return true;
3040     }
3041 
3042   if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3043     {
3044       /* If the discriminator changed, but the line number did not,
3045          output the line table entry with is_stmt false so the
3046          debugger does not treat this as a breakpoint location.  */
3047       last_discriminator = discriminator;
3048       *is_stmt = false;
3049       return true;
3050     }
3051 
3052   return false;
3053 }
3054 
3055 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3056    directly to the desired hard register.  */
3057 
3058 void
3059 cleanup_subreg_operands (rtx insn)
3060 {
3061   int i;
3062   bool changed = false;
3063   extract_insn_cached (insn);
3064   for (i = 0; i < recog_data.n_operands; i++)
3065     {
3066       /* The following test cannot use recog_data.operand when testing
3067 	 for a SUBREG: the underlying object might have been changed
3068 	 already if we are inside a match_operator expression that
3069 	 matches the else clause.  Instead we test the underlying
3070 	 expression directly.  */
3071       if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3072 	{
3073 	  recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3074 	  changed = true;
3075 	}
3076       else if (GET_CODE (recog_data.operand[i]) == PLUS
3077 	       || GET_CODE (recog_data.operand[i]) == MULT
3078 	       || MEM_P (recog_data.operand[i]))
3079 	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3080     }
3081 
3082   for (i = 0; i < recog_data.n_dups; i++)
3083     {
3084       if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3085 	{
3086 	  *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3087 	  changed = true;
3088 	}
3089       else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3090 	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
3091 	       || MEM_P (*recog_data.dup_loc[i]))
3092 	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3093     }
3094   if (changed)
3095     df_insn_rescan (insn);
3096 }
3097 
3098 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3099    the thing it is a subreg of.  Do it anyway if FINAL_P.  */
3100 
3101 rtx
3102 alter_subreg (rtx *xp, bool final_p)
3103 {
3104   rtx x = *xp;
3105   rtx y = SUBREG_REG (x);
3106 
3107   /* simplify_subreg does not remove subreg from volatile references.
3108      We are required to.  */
3109   if (MEM_P (y))
3110     {
3111       int offset = SUBREG_BYTE (x);
3112 
3113       /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3114 	 contains 0 instead of the proper offset.  See simplify_subreg.  */
3115       if (offset == 0
3116 	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3117         {
3118           int difference = GET_MODE_SIZE (GET_MODE (y))
3119 			   - GET_MODE_SIZE (GET_MODE (x));
3120           if (WORDS_BIG_ENDIAN)
3121             offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3122           if (BYTES_BIG_ENDIAN)
3123             offset += difference % UNITS_PER_WORD;
3124         }
3125 
3126       if (final_p)
3127 	*xp = adjust_address (y, GET_MODE (x), offset);
3128       else
3129 	*xp = adjust_address_nv (y, GET_MODE (x), offset);
3130     }
3131   else
3132     {
3133       rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3134 				     SUBREG_BYTE (x));
3135 
3136       if (new_rtx != 0)
3137 	*xp = new_rtx;
3138       else if (final_p && REG_P (y))
3139 	{
3140 	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
3141 	  unsigned int regno;
3142 	  HOST_WIDE_INT offset;
3143 
3144 	  regno = subreg_regno (x);
3145 	  if (subreg_lowpart_p (x))
3146 	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3147 	  else
3148 	    offset = SUBREG_BYTE (x);
3149 	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3150 	}
3151     }
3152 
3153   return *xp;
3154 }
3155 
3156 /* Do alter_subreg on all the SUBREGs contained in X.  */
3157 
3158 static rtx
3159 walk_alter_subreg (rtx *xp, bool *changed)
3160 {
3161   rtx x = *xp;
3162   switch (GET_CODE (x))
3163     {
3164     case PLUS:
3165     case MULT:
3166     case AND:
3167       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3168       XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3169       break;
3170 
3171     case MEM:
3172     case ZERO_EXTEND:
3173       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3174       break;
3175 
3176     case SUBREG:
3177       *changed = true;
3178       return alter_subreg (xp, true);
3179 
3180     default:
3181       break;
3182     }
3183 
3184   return *xp;
3185 }
3186 
3187 #ifdef HAVE_cc0
3188 
3189 /* Given BODY, the body of a jump instruction, alter the jump condition
3190    as required by the bits that are set in cc_status.flags.
3191    Not all of the bits there can be handled at this level in all cases.
3192 
3193    The value is normally 0.
3194    1 means that the condition has become always true.
3195    -1 means that the condition has become always false.
3196    2 means that COND has been altered.  */
3197 
3198 static int
3199 alter_cond (rtx cond)
3200 {
3201   int value = 0;
3202 
3203   if (cc_status.flags & CC_REVERSED)
3204     {
3205       value = 2;
3206       PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3207     }
3208 
3209   if (cc_status.flags & CC_INVERTED)
3210     {
3211       value = 2;
3212       PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3213     }
3214 
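  /* Each flag below records partial knowledge about the compared value.
     Conditions that this knowledge decides outright collapse to
     always-true (return 1) or always-false (return -1); the rest are
     rewritten into a test the available flags can answer.  */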
3215   if (cc_status.flags & CC_NOT_POSITIVE)
3216     switch (GET_CODE (cond))
3217       {
3218       case LE:
3219       case LEU:
3220       case GEU:
3221 	/* Jump becomes unconditional.  */
3222 	return 1;
3223 
3224       case GT:
3225       case GTU:
3226       case LTU:
3227 	/* Jump becomes no-op.  */
3228 	return -1;
3229 
3230       case GE:
3231 	PUT_CODE (cond, EQ);
3232 	value = 2;
3233 	break;
3234 
3235       case LT:
3236 	PUT_CODE (cond, NE);
3237 	value = 2;
3238 	break;
3239 
3240       default:
3241 	break;
3242       }
3243 
3244   if (cc_status.flags & CC_NOT_NEGATIVE)
3245     switch (GET_CODE (cond))
3246       {
3247       case GE:
3248       case GEU:
3249 	/* Jump becomes unconditional.  */
3250 	return 1;
3251 
3252       case LT:
3253       case LTU:
3254 	/* Jump becomes no-op.  */
3255 	return -1;
3256 
3257       case LE:
3258       case LEU:
3259 	PUT_CODE (cond, EQ);
3260 	value = 2;
3261 	break;
3262 
3263       case GT:
3264       case GTU:
3265 	PUT_CODE (cond, NE);
3266 	value = 2;
3267 	break;
3268 
3269       default:
3270 	break;
3271       }
3272 
3273   if (cc_status.flags & CC_NO_OVERFLOW)
3274     switch (GET_CODE (cond))
3275       {
3276       case GEU:
3277 	/* Jump becomes unconditional.  */
3278 	return 1;
3279 
3280       case LEU:
3281 	PUT_CODE (cond, EQ);
3282 	value = 2;
3283 	break;
3284 
3285       case GTU:
3286 	PUT_CODE (cond, NE);
3287 	value = 2;
3288 	break;
3289 
3290       case LTU:
3291 	/* Jump becomes no-op.  */
3292 	return -1;
3293 
3294       default:
3295 	break;
3296       }
3297 
3298   if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3299     switch (GET_CODE (cond))
3300       {
3301       default:
3302 	gcc_unreachable ();
3303 
3304       case NE:
3305 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3306 	value = 2;
3307 	break;
3308 
3309       case EQ:
3310 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3311 	value = 2;
3312 	break;
3313       }
3314 
3315   if (cc_status.flags & CC_NOT_SIGNED)
3316     /* The flags are valid if signed condition operators are converted
3317        to unsigned.  */
3318     switch (GET_CODE (cond))
3319       {
3320       case LE:
3321 	PUT_CODE (cond, LEU);
3322 	value = 2;
3323 	break;
3324 
3325       case LT:
3326 	PUT_CODE (cond, LTU);
3327 	value = 2;
3328 	break;
3329 
3330       case GT:
3331 	PUT_CODE (cond, GTU);
3332 	value = 2;
3333 	break;
3334 
3335       case GE:
3336 	PUT_CODE (cond, GEU);
3337 	value = 2;
3338 	break;
3339 
3340       default:
3341 	break;
3342       }
3343 
3344   return value;
3345 }
3346 #endif
3347 
3348 /* Report inconsistency between the assembler template and the operands.
3349    In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
3350 
3351 void
3352 output_operand_lossage (const char *cmsgid, ...)
3353 {
3354   char *fmt_string;
3355   char *new_message;
3356   const char *pfx_str;
3357   va_list ap;
3358 
3359   va_start (ap, cmsgid);
3360 
3361   pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3362   asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3363   vasprintf (&new_message, fmt_string, ap);
3364 
3365   if (this_is_asm_operands)
3366     error_for_asm (this_is_asm_operands, "%s", new_message);
3367   else
3368     internal_error ("%s", new_message);
3369 
3370   free (fmt_string);
3371   free (new_message);
3372   va_end (ap);
3373 }
3374 
3375 /* Output of assembler code from a template, and its subroutines.  */
3376 
3377 /* Annotate the assembly with a comment describing the pattern and
3378    alternative used.  */
3379 
3380 static void
3381 output_asm_name (void)
3382 {
3383   if (debug_insn)
3384     {
3385       int num = INSN_CODE (debug_insn);
3386       fprintf (asm_out_file, "\t%s %d\t%s",
3387 	       ASM_COMMENT_START, INSN_UID (debug_insn),
3388 	       insn_data[num].name);
3389       if (insn_data[num].n_alternatives > 1)
3390 	fprintf (asm_out_file, "/%d", which_alternative + 1);
3391 
3392       if (HAVE_ATTR_length)
3393 	fprintf (asm_out_file, "\t[length = %d]",
3394 		 get_attr_length (debug_insn));
3395 
3396       /* Clear this so only the first assembler insn
3397 	 of any rtl insn will get the special comment for -dp.  */
3398       debug_insn = 0;
3399     }
3400 }
3401 
3402 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3403    or its address, return that expr.  Set *PADDRESSP to 1 if the expr
3404    corresponds to the address of the object and 0 if to the object.  */
3405 
3406 static tree
3407 get_mem_expr_from_op (rtx op, int *paddressp)
3408 {
3409   tree expr;
3410   int inner_addressp;
3411 
3412   *paddressp = 0;
3413 
3414   if (REG_P (op))
3415     return REG_EXPR (op);
3416   else if (!MEM_P (op))
3417     return 0;
3418 
3419   if (MEM_EXPR (op) != 0)
3420     return MEM_EXPR (op);
3421 
3422   /* Otherwise we have an address, so indicate it and look at the address.  */
3423   *paddressp = 1;
3424   op = XEXP (op, 0);
3425 
3426   /* First check if we have a decl for the address, then look at the right side
3427      if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
3428      But don't allow the address to itself be indirect.  */
3429   if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3430     return expr;
3431   else if (GET_CODE (op) == PLUS
3432 	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3433     return expr;
3434 
3435   while (UNARY_P (op)
3436 	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3437     op = XEXP (op, 0);
3438 
3439   expr = get_mem_expr_from_op (op, &inner_addressp);
3440   return inner_addressp ? 0 : expr;
3441 }
3442 
3443 /* Output operand names for assembler instructions.  OPERANDS is the
3444    operand vector, OPORDER is the order to write the operands, and NOPS
3445    is the number of operands to write.  */
3446 
3447 static void
3448 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3449 {
3450   int wrote = 0;
3451   int i;
3452 
3453   for (i = 0; i < nops; i++)
3454     {
3455       int addressp;
3456       rtx op = operands[oporder[i]];
3457       tree expr = get_mem_expr_from_op (op, &addressp);
3458 
3459       fprintf (asm_out_file, "%c%s",
3460 	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3461       wrote = 1;
3462       if (expr)
3463 	{
3464 	  fprintf (asm_out_file, "%s",
3465 		   addressp ? "*" : "");
3466 	  print_mem_expr (asm_out_file, expr);
3467 	  wrote = 1;
3468 	}
3469       else if (REG_P (op) && ORIGINAL_REGNO (op)
3470 	       && ORIGINAL_REGNO (op) != REGNO (op))
3471 	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3472     }
3473 }
3474 
3475 #ifdef ASSEMBLER_DIALECT
3476 /* Helper function to parse assembler dialects in the asm string.
3477    This is called from output_asm_insn and asm_fprintf.  */
3478 static const char *
3479 do_assembler_dialects (const char *p, int *dialect)
3480 {
3481   char c = *(p - 1);
3482 
3483   switch (c)
3484     {
3485     case '{':
3486       {
3487         int i;
3488 
3489         if (*dialect)
3490           output_operand_lossage ("nested assembly dialect alternatives");
3491         else
3492           *dialect = 1;
3493 
3494         /* If we want the first dialect, do nothing.  Otherwise, skip
3495            DIALECT_NUMBER strings, each ending with '|'.  */
3496         for (i = 0; i < dialect_number; i++)
3497           {
3498             while (*p && *p != '}')
3499 	      {
3500 		if (*p == '|')
3501 		  {
3502 		    p++;
3503 		    break;
3504 		  }
3505 
3506 		/* Skip over any character after a percent sign.  */
3507 		if (*p == '%')
3508 		  p++;
3509 		if (*p)
3510 		  p++;
3511 	      }
3512 
3513             if (*p == '}')
3514 	      break;
3515           }
3516 
3517         if (*p == '\0')
3518           output_operand_lossage ("unterminated assembly dialect alternative");
3519       }
3520       break;
3521 
3522     case '|':
3523       if (*dialect)
3524         {
3525           /* Skip to close brace.  */
3526           do
3527             {
3528 	      if (*p == '\0')
3529 		{
3530 		  output_operand_lossage ("unterminated assembly dialect alternative");
3531 		  break;
3532 		}
3533 
3534 	      /* Skip over any character after a percent sign.  */
3535 	      if (*p == '%' && p[1])
3536 		{
3537 		  p += 2;
3538 		  continue;
3539 		}
3540 
3541 	      if (*p++ == '}')
3542 		break;
3543             }
3544           while (1);
3545 
3546           *dialect = 0;
3547         }
3548       else
3549         putc (c, asm_out_file);
3550       break;
3551 
3552     case '}':
3553       if (! *dialect)
3554         putc (c, asm_out_file);
3555       *dialect = 0;
3556       break;
3557     default:
3558       gcc_unreachable ();
3559     }
3560 
3561   return p;
3562 }
3563 #endif
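
/* Illustrative note (hedged, not from the original sources): with
   ASSEMBLER_DIALECT defined and two dialects, a template fragment such as

	"mov{l}\t{%1, %0|%0, %1}"

   comes out as "movl\t%1, %0" when dialect_number is 0 and as
   "mov\t%0, %1" when it is 1; the braces and '|' themselves are consumed
   by this routine.  The mnemonics are only an example.  */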
3564 
3565 /* Output text from TEMPLATE to the assembler output file,
3566    obeying %-directions to substitute operands taken from
3567    the vector OPERANDS.
3568 
3569    %N (for N a digit) means print operand N in usual manner.
3570    %lN means require operand N to be a CODE_LABEL or LABEL_REF
3571       and print the label name with no punctuation.
3572    %cN means require operand N to be a constant
3573       and print the constant expression with no punctuation.
3574    %aN means expect operand N to be a memory address
3575       (not a memory reference!) and print a reference
3576       to that address.
3577    %nN means expect operand N to be a constant
3578       and print a constant expression for minus the value
3579       of the operand, with no other punctuation.  */
3580 
3581 void
3582 output_asm_insn (const char *templ, rtx *operands)
3583 {
3584   const char *p;
3585   int c;
3586 #ifdef ASSEMBLER_DIALECT
3587   int dialect = 0;
3588 #endif
3589   int oporder[MAX_RECOG_OPERANDS];
3590   char opoutput[MAX_RECOG_OPERANDS];
3591   int ops = 0;
3592 
3593   /* An insn may return a null string template
3594      in a case where no assembler code is needed.  */
3595   if (*templ == 0)
3596     return;
3597 
3598   memset (opoutput, 0, sizeof opoutput);
3599   p = templ;
3600   putc ('\t', asm_out_file);
3601 
3602 #ifdef ASM_OUTPUT_OPCODE
3603   ASM_OUTPUT_OPCODE (asm_out_file, p);
3604 #endif
3605 
3606   while ((c = *p++))
3607     switch (c)
3608       {
3609       case '\n':
3610 	if (flag_verbose_asm)
3611 	  output_asm_operand_names (operands, oporder, ops);
3612 	if (flag_print_asm_name)
3613 	  output_asm_name ();
3614 
3615 	ops = 0;
3616 	memset (opoutput, 0, sizeof opoutput);
3617 
3618 	putc (c, asm_out_file);
3619 #ifdef ASM_OUTPUT_OPCODE
3620 	while ((c = *p) == '\t')
3621 	  {
3622 	    putc (c, asm_out_file);
3623 	    p++;
3624 	  }
3625 	ASM_OUTPUT_OPCODE (asm_out_file, p);
3626 #endif
3627 	break;
3628 
3629 #ifdef ASSEMBLER_DIALECT
3630       case '{':
3631       case '}':
3632       case '|':
3633 	p = do_assembler_dialects (p, &dialect);
3634 	break;
3635 #endif
3636 
3637       case '%':
3638 	/* %% outputs a single %.  %{, %} and %| print {, } and | respectively
3639 	   if ASSEMBLER_DIALECT is defined and these characters have a special
3640 	   meaning as dialect delimiters.  */
3641 	if (*p == '%'
3642 #ifdef ASSEMBLER_DIALECT
3643 	    || *p == '{' || *p == '}' || *p == '|'
3644 #endif
3645 	    )
3646 	  {
3647 	    putc (*p, asm_out_file);
3648 	    p++;
3649 	  }
3650 	/* %= outputs a number which is unique to each insn in the entire
3651 	   compilation.  This is useful for making local labels that are
3652 	   referred to more than once in a given insn.  */
3653 	else if (*p == '=')
3654 	  {
3655 	    p++;
3656 	    fprintf (asm_out_file, "%d", insn_counter);
3657 	  }
3658 	/* % followed by a letter and some digits
3659 	   outputs an operand in a special way depending on the letter.
3660 	   Letters `acln' are implemented directly.
3661 	   Other letters are passed to `output_operand' so that
3662 	   the TARGET_PRINT_OPERAND hook can define them.  */
3663 	else if (ISALPHA (*p))
3664 	  {
3665 	    int letter = *p++;
3666 	    unsigned long opnum;
3667 	    char *endptr;
3668 
3669 	    opnum = strtoul (p, &endptr, 10);
3670 
3671 	    if (endptr == p)
3672 	      output_operand_lossage ("operand number missing "
3673 				      "after %%-letter");
3674 	    else if (this_is_asm_operands && opnum >= insn_noperands)
3675 	      output_operand_lossage ("operand number out of range");
3676 	    else if (letter == 'l')
3677 	      output_asm_label (operands[opnum]);
3678 	    else if (letter == 'a')
3679 	      output_address (operands[opnum]);
3680 	    else if (letter == 'c')
3681 	      {
3682 		if (CONSTANT_ADDRESS_P (operands[opnum]))
3683 		  output_addr_const (asm_out_file, operands[opnum]);
3684 		else
3685 		  output_operand (operands[opnum], 'c');
3686 	      }
3687 	    else if (letter == 'n')
3688 	      {
3689 		if (CONST_INT_P (operands[opnum]))
3690 		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3691 			   - INTVAL (operands[opnum]));
3692 		else
3693 		  {
3694 		    putc ('-', asm_out_file);
3695 		    output_addr_const (asm_out_file, operands[opnum]);
3696 		  }
3697 	      }
3698 	    else
3699 	      output_operand (operands[opnum], letter);
3700 
3701 	    if (!opoutput[opnum])
3702 	      oporder[ops++] = opnum;
3703 	    opoutput[opnum] = 1;
3704 
3705 	    p = endptr;
3706 	    c = *p;
3707 	  }
3708 	/* % followed by a digit outputs an operand the default way.  */
3709 	else if (ISDIGIT (*p))
3710 	  {
3711 	    unsigned long opnum;
3712 	    char *endptr;
3713 
3714 	    opnum = strtoul (p, &endptr, 10);
3715 	    if (this_is_asm_operands && opnum >= insn_noperands)
3716 	      output_operand_lossage ("operand number out of range");
3717 	    else
3718 	      output_operand (operands[opnum], 0);
3719 
3720 	    if (!opoutput[opnum])
3721 	      oporder[ops++] = opnum;
3722 	    opoutput[opnum] = 1;
3723 
3724 	    p = endptr;
3725 	    c = *p;
3726 	  }
3727 	/* % followed by punctuation: output something for that
3728 	   punctuation character alone, with no operand.  The
3729 	   TARGET_PRINT_OPERAND hook decides what is actually done.  */
3730 	else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3731 	  output_operand (NULL_RTX, *p++);
3732 	else
3733 	  output_operand_lossage ("invalid %%-code");
3734 	break;
3735 
3736       default:
3737 	putc (c, asm_out_file);
3738       }
3739 
3740   /* Write out the variable names for operands, if we know them.  */
3741   if (flag_verbose_asm)
3742     output_asm_operand_names (operands, oporder, ops);
3743   if (flag_print_asm_name)
3744     output_asm_name ();
3745 
3746   putc ('\n', asm_out_file);
3747 }
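
/* Example (illustrative only, not from the original sources): given the
   template

	"add\t%1,%0\n\tbne\t%l2"

   with operands 0 and 1 being registers and operand 2 a label, the loop
   above prints the "add" line with operands 0 and 1 substituted through the
   target's print_operand hook, starts a new line (re-running
   ASM_OUTPUT_OPCODE if it is defined), and prints the label name for
   operand 2 via output_asm_label because of the 'l' letter.  The mnemonics
   are hypothetical.  */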
3748 
3749 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
3750 
3751 void
3752 output_asm_label (rtx x)
3753 {
3754   char buf[256];
3755 
3756   if (GET_CODE (x) == LABEL_REF)
3757     x = XEXP (x, 0);
3758   if (LABEL_P (x)
3759       || (NOTE_P (x)
3760 	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3761     ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3762   else
3763     output_operand_lossage ("'%%l' operand isn't a label");
3764 
3765   assemble_name (asm_out_file, buf);
3766 }
3767 
3768 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3769    output_operand.  Marks SYMBOL_REFs as referenced through use of
3770    assemble_external.  */
3771 
3772 static int
3773 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3774 {
3775   rtx x = *xp;
3776 
3777   /* If we have a used symbol, we may have to emit assembly
3778      annotations corresponding to whether the symbol is external, weak
3779      or has non-default visibility.  */
3780   if (GET_CODE (x) == SYMBOL_REF)
3781     {
3782       tree t;
3783 
3784       t = SYMBOL_REF_DECL (x);
3785       if (t)
3786 	assemble_external (t);
3787 
3788       return -1;
3789     }
3790 
3791   return 0;
3792 }
3793 
3794 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external.  */
3795 
3796 void
3797 mark_symbol_refs_as_used (rtx x)
3798 {
3799   for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3800 }
3801 
3802 /* Print operand X using machine-dependent assembler syntax.
3803    CODE is a non-digit that preceded the operand-number in the % spec,
3804    such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
3805    between the % and the digits.
3806    When CODE is a non-letter, X is 0.
3807 
3808    The meanings of the letters are machine-dependent and controlled
3809    by TARGET_PRINT_OPERAND.  */
3810 
3811 void
3812 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3813 {
3814   if (x && GET_CODE (x) == SUBREG)
3815     x = alter_subreg (&x, true);
3816 
3817   /* X must not be a pseudo reg.  */
3818   gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3819 
3820   targetm.asm_out.print_operand (asm_out_file, x, code);
3821 
3822   if (x == NULL_RTX)
3823     return;
3824 
3825   for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3826 }
3827 
3828 /* Print a memory reference operand for address X using
3829    machine-dependent assembler syntax.  */
3830 
3831 void
3832 output_address (rtx x)
3833 {
3834   bool changed = false;
3835   walk_alter_subreg (&x, &changed);
3836   targetm.asm_out.print_operand_address (asm_out_file, x);
3837 }
3838 
3839 /* Print an integer constant expression in assembler syntax.
3840    Addition and subtraction are the only arithmetic
3841    that may appear in these expressions.  */
3842 
3843 void
3844 output_addr_const (FILE *file, rtx x)
3845 {
3846   char buf[256];
3847 
3848  restart:
3849   switch (GET_CODE (x))
3850     {
3851     case PC:
3852       putc ('.', file);
3853       break;
3854 
3855     case SYMBOL_REF:
3856       if (SYMBOL_REF_DECL (x))
3857 	assemble_external (SYMBOL_REF_DECL (x));
3858 #ifdef ASM_OUTPUT_SYMBOL_REF
3859       ASM_OUTPUT_SYMBOL_REF (file, x);
3860 #else
3861       assemble_name (file, XSTR (x, 0));
3862 #endif
3863       break;
3864 
3865     case LABEL_REF:
3866       x = XEXP (x, 0);
3867       /* Fall through.  */
3868     case CODE_LABEL:
3869       ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3870 #ifdef ASM_OUTPUT_LABEL_REF
3871       ASM_OUTPUT_LABEL_REF (file, buf);
3872 #else
3873       assemble_name (file, buf);
3874 #endif
3875       break;
3876 
3877     case CONST_INT:
3878       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3879       break;
3880 
3881     case CONST:
3882       /* This used to output parentheses around the expression,
3883 	 but that does not work on the 386 (either ATT or BSD assembler).  */
3884       output_addr_const (file, XEXP (x, 0));
3885       break;
3886 
3887     case CONST_DOUBLE:
3888       if (GET_MODE (x) == VOIDmode)
3889 	{
3890 	  /* We can use %d if the number is one word and positive.  */
3891 	  if (CONST_DOUBLE_HIGH (x))
3892 	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3893 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3894 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3895 	  else if (CONST_DOUBLE_LOW (x) < 0)
3896 	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3897 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3898 	  else
3899 	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3900 	}
3901       else
3902 	/* We can't handle floating point constants;
3903 	   PRINT_OPERAND must handle them.  */
3904 	output_operand_lossage ("floating constant misused");
3905       break;
3906 
3907     case CONST_FIXED:
3908       fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3909       break;
3910 
3911     case PLUS:
3912       /* Some assemblers need integer constants to appear last (e.g. masm).  */
3913       if (CONST_INT_P (XEXP (x, 0)))
3914 	{
3915 	  output_addr_const (file, XEXP (x, 1));
3916 	  if (INTVAL (XEXP (x, 0)) >= 0)
3917 	    fprintf (file, "+");
3918 	  output_addr_const (file, XEXP (x, 0));
3919 	}
3920       else
3921 	{
3922 	  output_addr_const (file, XEXP (x, 0));
3923 	  if (!CONST_INT_P (XEXP (x, 1))
3924 	      || INTVAL (XEXP (x, 1)) >= 0)
3925 	    fprintf (file, "+");
3926 	  output_addr_const (file, XEXP (x, 1));
3927 	}
3928       break;
3929 
3930     case MINUS:
3931       /* Avoid outputting things like x-x or x+5-x,
3932 	 since some assemblers can't handle that.  */
3933       x = simplify_subtraction (x);
3934       if (GET_CODE (x) != MINUS)
3935 	goto restart;
3936 
3937       output_addr_const (file, XEXP (x, 0));
3938       fprintf (file, "-");
3939       if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3940 	  || GET_CODE (XEXP (x, 1)) == PC
3941 	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3942 	output_addr_const (file, XEXP (x, 1));
3943       else
3944 	{
3945 	  fputs (targetm.asm_out.open_paren, file);
3946 	  output_addr_const (file, XEXP (x, 1));
3947 	  fputs (targetm.asm_out.close_paren, file);
3948 	}
3949       break;
3950 
3951     case ZERO_EXTEND:
3952     case SIGN_EXTEND:
3953     case SUBREG:
3954     case TRUNCATE:
3955       output_addr_const (file, XEXP (x, 0));
3956       break;
3957 
3958     default:
3959       if (targetm.asm_out.output_addr_const_extra (file, x))
3960 	break;
3961 
3962       output_operand_lossage ("invalid expression as operand");
3963     }
3964 }
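
/* For example (illustrative): the rtx

	(const (plus (symbol_ref "foo") (const_int 8)))

   is printed as "foo+8".  A MINUS whose second operand is not a nonnegative
   CONST_INT, PC or SYMBOL_REF is wrapped in the target's
   open_paren/close_paren strings, as in the code above.  */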
3965 
3966 /* Output a quoted string.  */
3967 
3968 void
3969 output_quoted_string (FILE *asm_file, const char *string)
3970 {
3971 #ifdef OUTPUT_QUOTED_STRING
3972   OUTPUT_QUOTED_STRING (asm_file, string);
3973 #else
3974   char c;
3975 
3976   putc ('\"', asm_file);
3977   while ((c = *string++) != 0)
3978     {
3979       if (ISPRINT (c))
3980 	{
3981 	  if (c == '\"' || c == '\\')
3982 	    putc ('\\', asm_file);
3983 	  putc (c, asm_file);
3984 	}
3985       else
3986 	fprintf (asm_file, "\\%03o", (unsigned char) c);
3987     }
3988   putc ('\"', asm_file);
3989 #endif
3990 }
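
/* E.g. (illustrative) output_quoted_string (f, "say \"hi\"") writes
   "say \"hi\"" including the surrounding quotes, with the embedded quotes
   escaped; a non-printable character such as a newline is emitted as an
   octal escape like \012.  */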
3991 
3992 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3993 
3994 void
3995 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3996 {
3997   char buf[2 + CHAR_BIT * sizeof (value) / 4];
3998   if (value == 0)
3999     putc ('0', f);
4000   else
4001     {
4002       char *p = buf + sizeof (buf);
4003       do
4004         *--p = "0123456789abcdef"[value % 16];
4005       while ((value /= 16) != 0);
4006       *--p = 'x';
4007       *--p = '0';
4008       fwrite (p, 1, buf + sizeof (buf) - p, f);
4009     }
4010 }
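
/* E.g. fprint_whex (f, 0x1a2b) writes the six characters "0x1a2b", while
   fprint_whex (f, 0) writes just "0".  */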
4011 
4012 /* Internal function that prints an unsigned long in decimal in reverse.
4013    The output string IS NOT null-terminated. */
4014 
4015 static int
4016 sprint_ul_rev (char *s, unsigned long value)
4017 {
4018   int i = 0;
4019   do
4020     {
4021       s[i] = "0123456789"[value % 10];
4022       value /= 10;
4023       i++;
4024       /* alternate version, without modulo */
4025       /* oldval = value; */
4026       /* value /= 10; */
4027       /* s[i] = "0123456789" [oldval - 10*value]; */
4028       /* i++ */
4029     }
4030   while (value != 0);
4031   return i;
4032 }
4033 
4034 /* Write an unsigned long as decimal to a file, fast. */
4035 
4036 void
4037 fprint_ul (FILE *f, unsigned long value)
4038 {
4039   /* python says: len(str(2**64)) == 20 */
4040   char s[20];
4041   int i;
4042 
4043   i = sprint_ul_rev (s, value);
4044 
4045   /* It's probably too small to bother with string reversal and fputs. */
4046   do
4047     {
4048       i--;
4049       putc (s[i], f);
4050     }
4051   while (i != 0);
4052 }
4053 
4054 /* Write an unsigned long as decimal to a string, fast.
4055    s must be wide enough to not overflow, at least 21 chars.
4056    Returns the length of the string (without terminating '\0'). */
4057 
4058 int
4059 sprint_ul (char *s, unsigned long value)
4060 {
4061   int len;
4062   char tmp_c;
4063   int i;
4064   int j;
4065 
4066   len = sprint_ul_rev (s, value);
4067   s[len] = '\0';
4068 
4069   /* Reverse the string. */
4070   i = 0;
4071   j = len - 1;
4072   while (i < j)
4073     {
4074       tmp_c = s[i];
4075       s[i] = s[j];
4076       s[j] = tmp_c;
4077       i++; j--;
4078     }
4079 
4080   return len;
4081 }
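
/* E.g. sprint_ul (buf, 12345) stores "12345" plus a terminating '\0' in
   BUF and returns 5.  */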
4082 
4083 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4084    %R prints the value of REGISTER_PREFIX.
4085    %L prints the value of LOCAL_LABEL_PREFIX.
4086    %U prints the value of USER_LABEL_PREFIX.
4087    %I prints the value of IMMEDIATE_PREFIX.
4088    %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4089    Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4090 
4091    We handle alternate assembler dialects here, just like output_asm_insn.  */
4092 
4093 void
4094 asm_fprintf (FILE *file, const char *p, ...)
4095 {
4096   char buf[10];
4097   char *q, c;
4098 #ifdef ASSEMBLER_DIALECT
4099   int dialect = 0;
4100 #endif
4101   va_list argptr;
4102 
4103   va_start (argptr, p);
4104 
4105   buf[0] = '%';
4106 
4107   while ((c = *p++))
4108     switch (c)
4109       {
4110 #ifdef ASSEMBLER_DIALECT
4111       case '{':
4112       case '}':
4113       case '|':
4114 	p = do_assembler_dialects (p, &dialect);
4115 	break;
4116 #endif
4117 
4118       case '%':
4119 	c = *p++;
4120 	q = &buf[1];
4121 	while (strchr ("-+ #0", c))
4122 	  {
4123 	    *q++ = c;
4124 	    c = *p++;
4125 	  }
4126 	while (ISDIGIT (c) || c == '.')
4127 	  {
4128 	    *q++ = c;
4129 	    c = *p++;
4130 	  }
4131 	switch (c)
4132 	  {
4133 	  case '%':
4134 	    putc ('%', file);
4135 	    break;
4136 
4137 	  case 'd':  case 'i':  case 'u':
4138 	  case 'x':  case 'X':  case 'o':
4139 	  case 'c':
4140 	    *q++ = c;
4141 	    *q = 0;
4142 	    fprintf (file, buf, va_arg (argptr, int));
4143 	    break;
4144 
4145 	  case 'w':
4146 	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4147 	       'o' cases, but we do not check for those cases.  It
4148 	       means that the value is a HOST_WIDE_INT, which may be
4149 	       either `long' or `long long'.  */
4150 	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4151 	    q += strlen (HOST_WIDE_INT_PRINT);
4152 	    *q++ = *p++;
4153 	    *q = 0;
4154 	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4155 	    break;
4156 
4157 	  case 'l':
4158 	    *q++ = c;
4159 #ifdef HAVE_LONG_LONG
4160 	    if (*p == 'l')
4161 	      {
4162 		*q++ = *p++;
4163 		*q++ = *p++;
4164 		*q = 0;
4165 		fprintf (file, buf, va_arg (argptr, long long));
4166 	      }
4167 	    else
4168 #endif
4169 	      {
4170 		*q++ = *p++;
4171 		*q = 0;
4172 		fprintf (file, buf, va_arg (argptr, long));
4173 	      }
4174 
4175 	    break;
4176 
4177 	  case 's':
4178 	    *q++ = c;
4179 	    *q = 0;
4180 	    fprintf (file, buf, va_arg (argptr, char *));
4181 	    break;
4182 
4183 	  case 'O':
4184 #ifdef ASM_OUTPUT_OPCODE
4185 	    ASM_OUTPUT_OPCODE (asm_out_file, p);
4186 #endif
4187 	    break;
4188 
4189 	  case 'R':
4190 #ifdef REGISTER_PREFIX
4191 	    fprintf (file, "%s", REGISTER_PREFIX);
4192 #endif
4193 	    break;
4194 
4195 	  case 'I':
4196 #ifdef IMMEDIATE_PREFIX
4197 	    fprintf (file, "%s", IMMEDIATE_PREFIX);
4198 #endif
4199 	    break;
4200 
4201 	  case 'L':
4202 #ifdef LOCAL_LABEL_PREFIX
4203 	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4204 #endif
4205 	    break;
4206 
4207 	  case 'U':
4208 	    fputs (user_label_prefix, file);
4209 	    break;
4210 
4211 #ifdef ASM_FPRINTF_EXTENSIONS
4212 	    /* Uppercase letters are reserved for general use by asm_fprintf
4213 	       and so are not available to target-specific code.  In order to
4214 	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
4215 	       they are defined here.  As they get turned into real extensions
4216 	       to asm_fprintf they should be removed from this list.  */
4217 	  case 'A': case 'B': case 'C': case 'D': case 'E':
4218 	  case 'F': case 'G': case 'H': case 'J': case 'K':
4219 	  case 'M': case 'N': case 'P': case 'Q': case 'S':
4220 	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
4221 	    break;
4222 
4223 	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4224 #endif
4225 	  default:
4226 	    gcc_unreachable ();
4227 	  }
4228 	break;
4229 
4230       default:
4231 	putc (c, file);
4232       }
4233   va_end (argptr);
4234 }
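
/* Usage sketch (illustrative; the prefix is target-dependent): on a target
   whose user_label_prefix is "_",

	asm_fprintf (file, "\tcall\t%U%s\n", "foo");

   writes "\tcall\t_foo\n".  %R, %I and %L similarly expand to
   REGISTER_PREFIX, IMMEDIATE_PREFIX and LOCAL_LABEL_PREFIX, or to nothing
   when the corresponding macro is not defined.  */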
4235 
4236 /* Return nonzero if this function has no function calls.  */
4237 
4238 int
4239 leaf_function_p (void)
4240 {
4241   rtx insn;
4242 
4243   if (crtl->profile || profile_arc_flag)
4244     return 0;
4245 
4246   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4247     {
4248       if (CALL_P (insn)
4249 	  && ! SIBLING_CALL_P (insn))
4250 	return 0;
4251       if (NONJUMP_INSN_P (insn)
4252 	  && GET_CODE (PATTERN (insn)) == SEQUENCE
4253 	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4254 	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4255 	return 0;
4256     }
4257 
4258   return 1;
4259 }
4260 
4261 /* Return 1 if the branch is a forward branch.
4262    Uses the insn_shuid array, so it works only in the final pass.  May be used
4263    by output templates to add branch prediction hints.
4264  */
4265 int
4266 final_forward_branch_p (rtx insn)
4267 {
4268   int insn_id, label_id;
4269 
4270   gcc_assert (uid_shuid);
4271   insn_id = INSN_SHUID (insn);
4272   label_id = INSN_SHUID (JUMP_LABEL (insn));
4273   /* We've hit some insns that do not have id information available.  */
4274   gcc_assert (insn_id && label_id);
4275   return insn_id < label_id;
4276 }
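
/* Illustrative use (hypothetical target code, not from the original
   sources): an output template for a conditional branch could pick a static
   prediction hint with

	return final_forward_branch_p (insn) ? "bne,pn\t%l0" : "bne,pt\t%l0";

   The mnemonics and hint syntax here are made up for the example.  */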
4277 
4278 /* On some machines, a function with no call insns
4279    can run faster if it doesn't create its own register window.
4280    When output, the leaf function should use only the "output"
4281    registers.  Ordinarily, the function would be compiled to use
4282    the "input" registers to find its arguments; it is a candidate
4283    for leaf treatment if it uses only the "input" registers.
4284    Leaf function treatment means renumbering so the function
4285    uses the "output" registers instead.  */
4286 
4287 #ifdef LEAF_REGISTERS
4288 
4289 /* Return 1 if this function uses only the registers that can be
4290    safely renumbered.  */
4291 
4292 int
4293 only_leaf_regs_used (void)
4294 {
4295   int i;
4296   const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4297 
4298   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4299     if ((df_regs_ever_live_p (i) || global_regs[i])
4300 	&& ! permitted_reg_in_leaf_functions[i])
4301       return 0;
4302 
4303   if (crtl->uses_pic_offset_table
4304       && pic_offset_table_rtx != 0
4305       && REG_P (pic_offset_table_rtx)
4306       && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4307     return 0;
4308 
4309   return 1;
4310 }
4311 
4312 /* Scan all instructions and renumber all registers into those
4313    available in leaf functions.  */
4314 
4315 static void
4316 leaf_renumber_regs (rtx first)
4317 {
4318   rtx insn;
4319 
4320   /* Renumber only the actual patterns.
4321      The reg-notes can contain frame pointer refs,
4322      and renumbering them could crash, and should not be needed.  */
4323   for (insn = first; insn; insn = NEXT_INSN (insn))
4324     if (INSN_P (insn))
4325       leaf_renumber_regs_insn (PATTERN (insn));
4326 }
4327 
4328 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4329    available in leaf functions.  */
4330 
4331 void
4332 leaf_renumber_regs_insn (rtx in_rtx)
4333 {
4334   int i, j;
4335   const char *format_ptr;
4336 
4337   if (in_rtx == 0)
4338     return;
4339 
4340   /* Renumber all input-registers into output-registers.
4341      renumbered_regs would be 1 for an output-register;
4342      the reg's `used' bit below marks it as already renumbered.  */
4343 
4344   if (REG_P (in_rtx))
4345     {
4346       int newreg;
4347 
4348       /* Don't renumber the same reg twice.  */
4349       if (in_rtx->used)
4350 	return;
4351 
4352       newreg = REGNO (in_rtx);
4353       /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
4354 	 to reach here as part of a REG_NOTE.  */
4355       if (newreg >= FIRST_PSEUDO_REGISTER)
4356 	{
4357 	  in_rtx->used = 1;
4358 	  return;
4359 	}
4360       newreg = LEAF_REG_REMAP (newreg);
4361       gcc_assert (newreg >= 0);
4362       df_set_regs_ever_live (REGNO (in_rtx), false);
4363       df_set_regs_ever_live (newreg, true);
4364       SET_REGNO (in_rtx, newreg);
4365       in_rtx->used = 1;
4366     }
4367 
4368   if (INSN_P (in_rtx))
4369     {
4370       /* Inside a SEQUENCE, we find insns.
4371 	 Renumber just the patterns of these insns,
4372 	 just as we do for the top-level insns.  */
4373       leaf_renumber_regs_insn (PATTERN (in_rtx));
4374       return;
4375     }
4376 
4377   format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4378 
4379   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4380     switch (*format_ptr++)
4381       {
4382       case 'e':
4383 	leaf_renumber_regs_insn (XEXP (in_rtx, i));
4384 	break;
4385 
4386       case 'E':
4387 	if (NULL != XVEC (in_rtx, i))
4388 	  {
4389 	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
4390 	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4391 	  }
4392 	break;
4393 
4394       case 'S':
4395       case 's':
4396       case '0':
4397       case 'i':
4398       case 'w':
4399       case 'n':
4400       case 'u':
4401 	break;
4402 
4403       default:
4404 	gcc_unreachable ();
4405       }
4406 }
4407 #endif
4408 
4409 /* Turn the RTL into assembly.  */
4410 static unsigned int
4411 rest_of_handle_final (void)
4412 {
4413   rtx x;
4414   const char *fnname;
4415 
4416   /* Get the function's name, as described by its RTL.  This may be
4417      different from the DECL_NAME name used in the source file.  */
4418 
4419   x = DECL_RTL (current_function_decl);
4420   gcc_assert (MEM_P (x));
4421   x = XEXP (x, 0);
4422   gcc_assert (GET_CODE (x) == SYMBOL_REF);
4423   fnname = XSTR (x, 0);
4424 
4425   assemble_start_function (current_function_decl, fnname);
4426   final_start_function (get_insns (), asm_out_file, optimize);
4427   final (get_insns (), asm_out_file, optimize);
4428   final_end_function ();
4429 
4430   /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4431      directive that closes the procedure descriptor.  Similarly, for x64 SEH.
4432      Otherwise it's not strictly necessary, but it doesn't hurt either.  */
4433   output_function_exception_table (fnname);
4434 
4435   assemble_end_function (current_function_decl, fnname);
4436 
4437   user_defined_section_attribute = false;
4438 
4439   /* Free up reg info memory.  */
4440   free_reg_info ();
4441 
4442   if (! quiet_flag)
4443     fflush (asm_out_file);
4444 
4445   /* Write DBX symbols if requested.  */
4446 
4447   /* Note that for those inline functions where we don't initially
4448      know for certain that we will be generating an out-of-line copy,
4449      the first invocation of this routine (rest_of_compilation) will
4450      skip over this code by doing a `goto exit_rest_of_compilation;'.
4451      Later on, wrapup_global_declarations will (indirectly) call
4452      rest_of_compilation again for those inline functions that need
4453      to have out-of-line copies generated.  During that call, we
4454      *will* be routed past here.  */
4455 
4456   timevar_push (TV_SYMOUT);
4457   if (!DECL_IGNORED_P (current_function_decl))
4458     debug_hooks->function_decl (current_function_decl);
4459   timevar_pop (TV_SYMOUT);
4460 
4461   /* Release the blocks that are linked to DECL_INITIAL() to free the memory.  */
4462   DECL_INITIAL (current_function_decl) = error_mark_node;
4463 
4464   if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4465       && targetm.have_ctors_dtors)
4466     targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4467 				 decl_init_priority_lookup
4468 				   (current_function_decl));
4469   if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4470       && targetm.have_ctors_dtors)
4471     targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4472 				decl_fini_priority_lookup
4473 				  (current_function_decl));
4474   return 0;
4475 }
4476 
4477 namespace {
4478 
4479 const pass_data pass_data_final =
4480 {
4481   RTL_PASS, /* type */
4482   "final", /* name */
4483   OPTGROUP_NONE, /* optinfo_flags */
4484   false, /* has_gate */
4485   true, /* has_execute */
4486   TV_FINAL, /* tv_id */
4487   0, /* properties_required */
4488   0, /* properties_provided */
4489   0, /* properties_destroyed */
4490   0, /* todo_flags_start */
4491   0, /* todo_flags_finish */
4492 };
4493 
4494 class pass_final : public rtl_opt_pass
4495 {
4496 public:
4497   pass_final (gcc::context *ctxt)
4498     : rtl_opt_pass (pass_data_final, ctxt)
4499   {}
4500 
4501   /* opt_pass methods: */
4502   unsigned int execute () { return rest_of_handle_final (); }
4503 
4504 }; // class pass_final
4505 
4506 } // anon namespace
4507 
4508 rtl_opt_pass *
4509 make_pass_final (gcc::context *ctxt)
4510 {
4511   return new pass_final (ctxt);
4512 }
4513 
4514 
4515 static unsigned int
4516 rest_of_handle_shorten_branches (void)
4517 {
4518   /* Shorten branches.  */
4519   shorten_branches (get_insns ());
4520   return 0;
4521 }
4522 
4523 namespace {
4524 
4525 const pass_data pass_data_shorten_branches =
4526 {
4527   RTL_PASS, /* type */
4528   "shorten", /* name */
4529   OPTGROUP_NONE, /* optinfo_flags */
4530   false, /* has_gate */
4531   true, /* has_execute */
4532   TV_SHORTEN_BRANCH, /* tv_id */
4533   0, /* properties_required */
4534   0, /* properties_provided */
4535   0, /* properties_destroyed */
4536   0, /* todo_flags_start */
4537   0, /* todo_flags_finish */
4538 };
4539 
4540 class pass_shorten_branches : public rtl_opt_pass
4541 {
4542 public:
4543   pass_shorten_branches (gcc::context *ctxt)
4544     : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4545   {}
4546 
4547   /* opt_pass methods: */
4548   unsigned int execute () { return rest_of_handle_shorten_branches (); }
4549 
4550 }; // class pass_shorten_branches
4551 
4552 } // anon namespace
4553 
4554 rtl_opt_pass *
4555 make_pass_shorten_branches (gcc::context *ctxt)
4556 {
4557   return new pass_shorten_branches (ctxt);
4558 }
4559 
4560 
4561 static unsigned int
4562 rest_of_clean_state (void)
4563 {
4564   rtx insn, next;
4565   FILE *final_output = NULL;
4566   int save_unnumbered = flag_dump_unnumbered;
4567   int save_noaddr = flag_dump_noaddr;
4568 
4569   if (flag_dump_final_insns)
4570     {
4571       final_output = fopen (flag_dump_final_insns, "a");
4572       if (!final_output)
4573 	{
4574 	  error ("could not open final insn dump file %qs: %m",
4575 		 flag_dump_final_insns);
4576 	  flag_dump_final_insns = NULL;
4577 	}
4578       else
4579 	{
4580 	  flag_dump_noaddr = flag_dump_unnumbered = 1;
4581 	  if (flag_compare_debug_opt || flag_compare_debug)
4582 	    dump_flags |= TDF_NOUID;
4583 	  dump_function_header (final_output, current_function_decl,
4584 				dump_flags);
4585 	  final_insns_dump_p = true;
4586 
4587 	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4588 	    if (LABEL_P (insn))
4589 	      INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4590 	    else
4591 	      {
4592 		if (NOTE_P (insn))
4593 		  set_block_for_insn (insn, NULL);
4594 		INSN_UID (insn) = 0;
4595 	      }
4596 	}
4597     }
4598 
4599   /* It is very important to decompose the RTL instruction chain here:
4600      debug information keeps pointing into CODE_LABEL insns inside the function
4601      body.  If these remain pointing to the other insns, we end up preserving
4602      the whole RTL chain and its attached detailed debug info in memory.  */
4603   for (insn = get_insns (); insn; insn = next)
4604     {
4605       next = NEXT_INSN (insn);
4606       NEXT_INSN (insn) = NULL;
4607       PREV_INSN (insn) = NULL;
4608 
4609       if (final_output
4610 	  && (!NOTE_P (insn) ||
4611 	      (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4612 	       && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4613 	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4614 	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4615 	       && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4616 	print_rtl_single (final_output, insn);
4617     }
4618 
4619   if (final_output)
4620     {
4621       flag_dump_noaddr = save_noaddr;
4622       flag_dump_unnumbered = save_unnumbered;
4623       final_insns_dump_p = false;
4624 
4625       if (fclose (final_output))
4626 	{
4627 	  error ("could not close final insn dump file %qs: %m",
4628 		 flag_dump_final_insns);
4629 	  flag_dump_final_insns = NULL;
4630 	}
4631     }
4632 
4633   /* In case the function was not output,
4634      don't leave any temporary anonymous types
4635      queued up for sdb output.  */
4636 #ifdef SDB_DEBUGGING_INFO
4637   if (write_symbols == SDB_DEBUG)
4638     sdbout_types (NULL_TREE);
4639 #endif
4640 
4641   flag_rerun_cse_after_global_opts = 0;
4642   reload_completed = 0;
4643   epilogue_completed = 0;
4644 #ifdef STACK_REGS
4645   regstack_completed = 0;
4646 #endif
4647 
4648   /* Clear out the insn_length contents now that they are no
4649      longer valid.  */
4650   init_insn_lengths ();
4651 
4652   /* Show no temporary slots allocated.  */
4653   init_temp_slots ();
4654 
4655   free_bb_for_insn ();
4656 
4657   delete_tree_ssa ();
4658 
4659   /* We can reduce the stack alignment at the call site only when we are sure
4660      that the function body just produced will actually be used in the final
4661      executable.  */
4662   if (decl_binds_to_current_def_p (current_function_decl))
4663     {
4664       unsigned int pref = crtl->preferred_stack_boundary;
4665       if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4666         pref = crtl->stack_alignment_needed;
4667       cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4668         = pref;
4669     }
4670 
4671   /* Make sure volatile mem refs aren't considered valid operands for
4672      arithmetic insns.  We must call this here if this is a nested inline
4673      function, since the above code leaves us in the init_recog state,
4674      and the function context push/pop code does not save/restore volatile_ok.
4675 
4676      ??? Maybe it isn't necessary for expand_start_function to call this
4677      anymore if we do it here?  */
4678 
4679   init_recog_no_volatile ();
4680 
4681   /* We're done with this function.  Free up memory if we can.  */
4682   free_after_parsing (cfun);
4683   free_after_compilation (cfun);
4684   return 0;
4685 }
4686 
4687 namespace {
4688 
4689 const pass_data pass_data_clean_state =
4690 {
4691   RTL_PASS, /* type */
4692   "*clean_state", /* name */
4693   OPTGROUP_NONE, /* optinfo_flags */
4694   false, /* has_gate */
4695   true, /* has_execute */
4696   TV_FINAL, /* tv_id */
4697   0, /* properties_required */
4698   0, /* properties_provided */
4699   PROP_rtl, /* properties_destroyed */
4700   0, /* todo_flags_start */
4701   0, /* todo_flags_finish */
4702 };
4703 
4704 class pass_clean_state : public rtl_opt_pass
4705 {
4706 public:
4707   pass_clean_state (gcc::context *ctxt)
4708     : rtl_opt_pass (pass_data_clean_state, ctxt)
4709   {}
4710 
4711   /* opt_pass methods: */
4712   unsigned int execute () { return rest_of_clean_state (); }
4713 
4714 }; // class pass_clean_state
4715 
4716 } // anon namespace
4717 
4718 rtl_opt_pass *
4719 make_pass_clean_state (gcc::context *ctxt)
4720 {
4721   return new pass_clean_state (ctxt);
4722 }
4723