1 /* Convert RTL to assembler code and output it, for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3    1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 /* This is the final pass of the compiler.
24    It looks at the rtl code for a function and outputs assembler code.
25 
26    Call `final_start_function' to output the assembler code for function entry,
27    `final' to output assembler code for some RTL code,
28    `final_end_function' to output assembler code for function exit.
29    If a function is compiled in several pieces, each piece is
30    output separately with `final'.
31 
32    Some optimizations are also done at this level.
33    Move instructions that were made unnecessary by good register allocation
34    are detected and omitted from the output.  (Though most of these
35    are removed by the last jump pass.)
36 
37    Instructions to set the condition codes are omitted when it can be
38    seen that the condition codes already had the desired values.
39 
40    In some cases it is sufficient if the inherited condition codes
41    have related values, but this may require the following insn
42    (the one that tests the condition codes) to be modified.
43 
44    The code for the function prologue and epilogue is generated
45    directly in assembler by the target functions function_prologue and
46    function_epilogue.  Those instructions never exist as rtl.  */
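/* An illustrative call sequence (a minimal sketch only; the real caller also
   wraps these calls with assemble_start_function / assemble_end_function and
   the debug hooks):

       final_start_function (first, asm_out_file, optimize);
       final (first, asm_out_file, optimize);
       final_end_function ();

   where FIRST is the first insn of the function's rtl and ASM_OUT_FILE is
   the assembler output stream.  */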
47 
48 #include "config.h"
49 #include "system.h"
50 #include "coretypes.h"
51 #include "tm.h"
52 
53 #include "tree.h"
54 #include "rtl.h"
55 #include "tm_p.h"
56 #include "regs.h"
57 #include "insn-config.h"
58 #include "insn-attr.h"
59 #include "recog.h"
60 #include "conditions.h"
61 #include "flags.h"
62 #include "real.h"
63 #include "hard-reg-set.h"
64 #include "output.h"
65 #include "except.h"
66 #include "function.h"
67 #include "toplev.h"
68 #include "reload.h"
69 #include "intl.h"
70 #include "basic-block.h"
71 #include "target.h"
72 #include "debug.h"
73 #include "expr.h"
74 #include "cfglayout.h"
75 #include "tree-pass.h"
76 #include "timevar.h"
77 #include "cgraph.h"
78 #include "coverage.h"
79 
80 #ifdef XCOFF_DEBUGGING_INFO
81 #include "xcoffout.h"		/* Needed for external data
82 				   declarations for e.g. AIX 4.x.  */
83 #endif
84 
85 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
86 #include "dwarf2out.h"
87 #endif
88 
89 #ifdef DBX_DEBUGGING_INFO
90 #include "dbxout.h"
91 #endif
92 
93 #ifdef SDB_DEBUGGING_INFO
94 #include "sdbout.h"
95 #endif
96 
97 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist.  So define a
98    null default for it to save conditionalization later.  */
99 #ifndef CC_STATUS_INIT
100 #define CC_STATUS_INIT
101 #endif
102 
103 /* How to start an assembler comment.  */
104 #ifndef ASM_COMMENT_START
105 #define ASM_COMMENT_START ";#"
106 #endif
107 
108 /* Is the given character a logical line separator for the assembler?  */
109 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
110 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
111 #endif
112 
113 #ifndef JUMP_TABLES_IN_TEXT_SECTION
114 #define JUMP_TABLES_IN_TEXT_SECTION 0
115 #endif
116 
117 /* Bitflags used by final_scan_insn.  */
118 #define SEEN_BB		1
119 #define SEEN_NOTE	2
120 #define SEEN_EMITTED	4
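/* Example: final_scan_insn below checks
   ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB) to detect that a basic
   block note has been seen but no source line has been emitted yet, at
   which point it forces a line note.  */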
121 
122 /* Last insn processed by final_scan_insn.  */
123 static rtx debug_insn;
124 rtx current_output_insn;
125 
126 /* Line number of last NOTE.  */
127 static int last_linenum;
128 
129 /* Highest line number in current block.  */
130 static int high_block_linenum;
131 
132 /* Likewise for function.  */
133 static int high_function_linenum;
134 
135 /* Filename of last NOTE.  */
136 static const char *last_filename;
137 
138 /* Whether to force emission of a line note before the next insn.  */
139 static bool force_source_line = false;
140 
141 extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */
142 
143 /* Nonzero while outputting an `asm' with operands.
144    This means that inconsistencies are the user's fault, so don't die.
145    The precise value is the insn being output, to pass to error_for_asm.  */
146 rtx this_is_asm_operands;
147 
148 /* Number of operands of this insn, for an `asm' with operands.  */
149 static unsigned int insn_noperands;
150 
151 /* Compare optimization flag.  */
152 
153 static rtx last_ignored_compare = 0;
154 
155 /* Assign a unique number to each insn that is output.
156    This can be used to generate unique local labels.  */
157 
158 static int insn_counter = 0;
159 
160 #ifdef HAVE_cc0
161 /* This variable contains machine-dependent flags (defined in tm.h)
162    set and examined by output routines
163    that describe how to interpret the condition codes properly.  */
164 
165 CC_STATUS cc_status;
166 
167 /* During output of an insn, this contains a copy of cc_status
168    from before the insn.  */
169 
170 CC_STATUS cc_prev_status;
171 #endif
172 
173 /* Indexed by hardware reg number, is 1 if that register is ever
174    used in the current function.
175 
176    In life_analysis, or in stupid_life_analysis, this is set
177    up to record the hard regs used explicitly.  Reload adds
178    in the hard regs used for holding pseudo regs.  Final uses
179    it to generate the code in the function prologue and epilogue
180    to save and restore registers as needed.  */
181 
182 char regs_ever_live[FIRST_PSEUDO_REGISTER];
183 
184 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
185    Unlike regs_ever_live, elements of this array corresponding to
186    eliminable regs like the frame pointer are set if an asm sets them.  */
187 
188 char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];
189 
190 /* Nonzero means current function must be given a frame pointer.
191    Initialized in function.c to 0.  Set only in reload1.c as per
192    the needs of the function.  */
193 
194 int frame_pointer_needed;
195 
196 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */
197 
198 static int block_depth;
199 
200 /* Nonzero if have enabled APP processing of our assembler output.  */
201 
202 static int app_on;
203 
204 /* If we are outputting an insn sequence, this contains the sequence rtx.
205    Zero otherwise.  */
206 
207 rtx final_sequence;
208 
209 #ifdef ASSEMBLER_DIALECT
210 
211 /* Number of the assembler dialect to use, starting at 0.  */
212 static int dialect_number;
213 #endif
214 
215 #ifdef HAVE_conditional_execution
216 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
217 rtx current_insn_predicate;
218 #endif
219 
220 #ifdef HAVE_ATTR_length
221 static int asm_insn_count (rtx);
222 #endif
223 static void profile_function (FILE *);
224 static void profile_after_prologue (FILE *);
225 static bool notice_source_line (rtx);
226 static rtx walk_alter_subreg (rtx *);
227 static void output_asm_name (void);
228 static void output_alternate_entry_point (FILE *, rtx);
229 static tree get_mem_expr_from_op (rtx, int *);
230 static void output_asm_operand_names (rtx *, int *, int);
231 static void output_operand (rtx, int);
232 #ifdef LEAF_REGISTERS
233 static void leaf_renumber_regs (rtx);
234 #endif
235 #ifdef HAVE_cc0
236 static int alter_cond (rtx);
237 #endif
238 #ifndef ADDR_VEC_ALIGN
239 static int final_addr_vec_align (rtx);
240 #endif
241 #ifdef HAVE_ATTR_length
242 static int align_fuzz (rtx, rtx, int, unsigned);
243 #endif
244 
245 /* Initialize data in final at the beginning of a compilation.  */
246 
247 void
248 init_final (const char *filename ATTRIBUTE_UNUSED)
249 {
250   app_on = 0;
251   final_sequence = 0;
252 
253 #ifdef ASSEMBLER_DIALECT
254   dialect_number = ASSEMBLER_DIALECT;
255 #endif
256 }
257 
258 /* Default target function prologue and epilogue assembler output.
259 
260    If not overridden for epilogue code, then the function body itself
261    contains return instructions wherever needed.  */
262 void
263 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
264 			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
265 {
266 }
267 
268 /* Default target hook that outputs nothing to a stream.  */
269 void
270 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
271 {
272 }
273 
274 /* Enable APP processing of subsequent output.
275    Used before the output from an `asm' statement.  */
276 
277 void
278 app_enable (void)
279 {
280   if (! app_on)
281     {
282       fputs (ASM_APP_ON, asm_out_file);
283       app_on = 1;
284     }
285 }
286 
287 /* Disable APP processing of subsequent output.
288    Called from varasm.c before most kinds of output.  */
289 
290 void
291 app_disable (void)
292 {
293   if (app_on)
294     {
295       fputs (ASM_APP_OFF, asm_out_file);
296       app_on = 0;
297     }
298 }
299 
300 /* Return the number of slots filled in the current
301    delayed branch sequence (we don't count the insn needing the
302    delay slot).   Zero if not in a delayed branch sequence.  */
303 
304 #ifdef DELAY_SLOTS
305 int
306 dbr_sequence_length (void)
307 {
308   if (final_sequence != 0)
309     return XVECLEN (final_sequence, 0) - 1;
310   else
311     return 0;
312 }
313 #endif
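/* For example, while outputting a filled delay-slot SEQUENCE consisting of
   a branch plus one delay insn, XVECLEN (final_sequence, 0) is 2 and this
   function returns 1.  */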
314 
315 /* The next two pages contain routines used to compute the length of an insn
316    and to shorten branches.  */
317 
318 /* Arrays for insn lengths, and addresses.  The latter is referenced by
319    `insn_current_length'.  */
320 
321 static int *insn_lengths;
322 
323 varray_type insn_addresses_;
324 
325 /* Max uid for which the above arrays are valid.  */
326 static int insn_lengths_max_uid;
327 
328 /* Address of insn being processed.  Used by `insn_current_length'.  */
329 int insn_current_address;
330 
331 /* Address of insn being processed in previous iteration.  */
332 int insn_last_address;
333 
334 /* Known invariant alignment of the insn being processed.  */
335 int insn_current_align;
336 
337 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
338    gives the next following alignment insn that increases the known
339    alignment, or NULL_RTX if there is no such insn.
340    For any alignment obtained this way, we can again index uid_align with
341    its uid to obtain the next following align that in turn increases the
342    alignment, till we reach NULL_RTX; the sequence obtained this way
343    for each insn we'll call the alignment chain of this insn in the following
344    comments.  */
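/* A sketch of walking one such alignment chain; this is essentially the loop
   align_fuzz uses below.  note_alignment () is only a hypothetical placeholder
   for whatever a consumer would do with each alignment point:

       rtx a;

       for (a = uid_align[INSN_UID (insn)]; a; a = uid_align[INSN_UID (a)])
	 note_alignment (LABEL_TO_ALIGNMENT (a));
   */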
345 
346 struct label_alignment
347 {
348   short alignment;
349   short max_skip;
350 };
351 
352 static rtx *uid_align;
353 static int *uid_shuid;
354 static struct label_alignment *label_align;
355 
356 /* Indicate that branch shortening hasn't yet been done.  */
357 
358 void
359 init_insn_lengths (void)
360 {
361   if (uid_shuid)
362     {
363       free (uid_shuid);
364       uid_shuid = 0;
365     }
366   if (insn_lengths)
367     {
368       free (insn_lengths);
369       insn_lengths = 0;
370       insn_lengths_max_uid = 0;
371     }
372 #ifdef HAVE_ATTR_length
373   INSN_ADDRESSES_FREE ();
374 #endif
375   if (uid_align)
376     {
377       free (uid_align);
378       uid_align = 0;
379     }
380 }
381 
382 /* Obtain the current length of an insn.  If branch shortening has been done,
383    get its actual length.  Otherwise, use FALLBACK_FN to calculate the
384    length.  */
385 static inline int
386 get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
387 		   int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
388 {
389 #ifdef HAVE_ATTR_length
390   rtx body;
391   int i;
392   int length = 0;
393 
394   if (insn_lengths_max_uid > INSN_UID (insn))
395     return insn_lengths[INSN_UID (insn)];
396   else
397     switch (GET_CODE (insn))
398       {
399       case NOTE:
400       case BARRIER:
401       case CODE_LABEL:
402 	return 0;
403 
404       case CALL_INSN:
405 	length = fallback_fn (insn);
406 	break;
407 
408       case JUMP_INSN:
409 	body = PATTERN (insn);
410 	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
411 	  {
412 	    /* Alignment is machine-dependent and should be handled by
413 	       ADDR_VEC_ALIGN.  */
414 	  }
415 	else
416 	  length = fallback_fn (insn);
417 	break;
418 
419       case INSN:
420 	body = PATTERN (insn);
421 	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
422 	  return 0;
423 
424 	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
425 	  length = asm_insn_count (body) * fallback_fn (insn);
426 	else if (GET_CODE (body) == SEQUENCE)
427 	  for (i = 0; i < XVECLEN (body, 0); i++)
428 	    length += get_attr_length (XVECEXP (body, 0, i));
429 	else
430 	  length = fallback_fn (insn);
431 	break;
432 
433       default:
434 	break;
435       }
436 
437 #ifdef ADJUST_INSN_LENGTH
438   ADJUST_INSN_LENGTH (insn, length);
439 #endif
440   return length;
441 #else /* not HAVE_ATTR_length */
442   return 0;
443 #define insn_default_length 0
444 #define insn_min_length 0
445 #endif /* not HAVE_ATTR_length */
446 }
447 
448 /* Obtain the current length of an insn.  If branch shortening has been done,
449    get its actual length.  Otherwise, get its maximum length.  */
450 int
451 get_attr_length (rtx insn)
452 {
453   return get_attr_length_1 (insn, insn_default_length);
454 }
455 
456 /* Obtain the current length of an insn.  If branch shortening has been done,
457    get its actual length.  Otherwise, get its minimum length.  */
458 int
459 get_attr_min_length (rtx insn)
460 {
461   return get_attr_length_1 (insn, insn_min_length);
462 }
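/* Concretely: before shorten_branches has recorded a length for INSN,
   get_attr_length falls back to insn_default_length (a maximum) and
   get_attr_min_length falls back to insn_min_length; once insn_lengths[]
   covers INSN's uid, both return the same recorded length.  */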
463 
464 /* Code to handle alignment inside shorten_branches.  */
465 
466 /* Here is an explanation how the algorithm in align_fuzz can give
467    proper results:
468 
469    Call a sequence of instructions beginning with alignment point X
470    and continuing until the next alignment point `block X'.  When `X'
471    is used in an expression, it means the alignment value of the
472    alignment point.
473 
474    Call the distance between the start of the first insn of block X, and
475    the end of the last insn of block X `IX', for the `inner size of X'.
476    This is clearly the sum of the instruction lengths.
477 
478    Likewise with the next alignment-delimited block following X, which we
479    shall call block Y.
480 
481    Call the distance between the start of the first insn of block X, and
482    the start of the first insn of block Y `OX', for the `outer size of X'.
483 
484    The estimated padding is then OX - IX.
485 
486    OX can be safely estimated as
487 
488            if (X >= Y)
489                    OX = round_up(IX, Y)
490            else
491                    OX = round_up(IX, X) + Y - X
492 
493    Clearly est(IX) >= real(IX), because that only depends on the
494    instruction lengths, and those being overestimated is a given.
495 
496    Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
497    we needn't worry about that when thinking about OX.
498 
499    When X >= Y, the alignment provided by Y adds no uncertainty factor
500    for branch ranges starting before X, so we can just round what we have.
501    But when X < Y, we don't know anything about the, so to speak,
502    `middle bits', so we have to assume the worst when aligning up from an
503    address mod X to one mod Y, which is Y - X.  */
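/* A small worked example of the OX estimate (reading X and Y as byte
   alignments, as the round_up formula implies): with IX = 10, X = 4 and
   Y = 8 we have X < Y, so OX = round_up (10, 4) + 8 - 4 = 16 and the
   estimated padding OX - IX is 6 bytes.  With X = 8 and Y = 4 instead,
   X >= Y gives OX = round_up (10, 4) = 12, i.e. 2 bytes of padding.  */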
504 
505 #ifndef LABEL_ALIGN
506 #define LABEL_ALIGN(LABEL) align_labels_log
507 #endif
508 
509 #ifndef LABEL_ALIGN_MAX_SKIP
510 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
511 #endif
512 
513 #ifndef LOOP_ALIGN
514 #define LOOP_ALIGN(LABEL) align_loops_log
515 #endif
516 
517 #ifndef LOOP_ALIGN_MAX_SKIP
518 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
519 #endif
520 
521 #ifndef LABEL_ALIGN_AFTER_BARRIER
522 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
523 #endif
524 
525 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
526 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
527 #endif
528 
529 #ifndef JUMP_ALIGN
530 #define JUMP_ALIGN(LABEL) align_jumps_log
531 #endif
532 
533 #ifndef JUMP_ALIGN_MAX_SKIP
534 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
535 #endif
536 
537 #ifndef ADDR_VEC_ALIGN
538 static int
539 final_addr_vec_align (rtx addr_vec)
540 {
541   int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
542 
543   if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
544     align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
545   return exact_log2 (align);
546 
547 }
548 
549 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
550 #endif
551 
552 #ifndef INSN_LENGTH_ALIGNMENT
553 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
554 #endif
555 
556 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
557 
558 static int min_labelno, max_labelno;
559 
560 #define LABEL_TO_ALIGNMENT(LABEL) \
561   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
562 
563 #define LABEL_TO_MAX_SKIP(LABEL) \
564   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
565 
566 /* For the benefit of port specific code do this also as a function.  */
567 
568 int
569 label_to_alignment (rtx label)
570 {
571   return LABEL_TO_ALIGNMENT (label);
572 }
573 
574 #ifdef HAVE_ATTR_length
575 /* The differences in addresses
576    between a branch and its target might grow or shrink depending on
577    the alignment the start insn of the range (the branch for a forward
578    branch or the label for a backward branch) starts out on; if these
579    differences are used naively, they can even oscillate infinitely.
580    We therefore want to compute a 'worst case' address difference that
581    is independent of the alignment the start insn of the range ends
582    up on, and that is at least as large as the actual difference.
583    The function align_fuzz calculates the amount we have to add to the
584    naively computed difference, by traversing the part of the alignment
585    chain of the start insn of the range that is in front of the end insn
586    of the range, and considering for each alignment the maximum amount
587    that it might contribute to a size increase.
588 
589    For casesi tables, we also want to know worst case minimum amounts of
590    address difference, in case a machine description wants to introduce
591    some common offset that is added to all offsets in a table.
592    For this purpose, align_fuzz with a growth argument of 0 computes the
593    appropriate adjustment.  */
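/* Illustration of the two GROWTH modes: the CASE_VECTOR_SHORTEN_MODE code in
   shorten_branches below calls, for example,

       min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
       max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);

   i.e. GROWTH == 0 asks for the worst-case shrink of the minimum offset and
   GROWTH == ~0 for the worst-case growth of the maximum offset.  */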
594 
595 /* Compute the maximum delta by which the difference of the addresses of
596    START and END might grow / shrink due to a different address for start
597    which changes the size of alignment insns between START and END.
598    KNOWN_ALIGN_LOG is the alignment known for START.
599    GROWTH should be ~0 if the objective is to compute potential code size
600    increase, and 0 if the objective is to compute potential shrink.
601    The return value is undefined for any other value of GROWTH.  */
602 
603 static int
604 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
605 {
606   int uid = INSN_UID (start);
607   rtx align_label;
608   int known_align = 1 << known_align_log;
609   int end_shuid = INSN_SHUID (end);
610   int fuzz = 0;
611 
612   for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
613     {
614       int align_addr, new_align;
615 
616       uid = INSN_UID (align_label);
617       align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
618       if (uid_shuid[uid] > end_shuid)
619 	break;
620       known_align_log = LABEL_TO_ALIGNMENT (align_label);
621       new_align = 1 << known_align_log;
622       if (new_align < known_align)
623 	continue;
624       fuzz += (-align_addr ^ growth) & (new_align - known_align);
625       known_align = new_align;
626     }
627   return fuzz;
628 }
629 
630 /* Compute a worst-case reference address of a branch so that it
631    can be safely used in the presence of aligned labels.  Since the
632    size of the branch itself is unknown, the size of the branch is
633    not included in the range.  I.e. for a forward branch, the reference
634    address is the end address of the branch as known from the previous
635    branch shortening pass, minus a value to account for possible size
636    increase due to alignment.  For a backward branch, it is the start
637    address of the branch as known from the current pass, plus a value
638    to account for possible size increase due to alignment.
639    NB.: Therefore, the maximum offset allowed for backward branches needs
640    to exclude the branch size.  */
641 
642 int
643 insn_current_reference_address (rtx branch)
644 {
645   rtx dest, seq;
646   int seq_uid;
647 
648   if (! INSN_ADDRESSES_SET_P ())
649     return 0;
650 
651   seq = NEXT_INSN (PREV_INSN (branch));
652   seq_uid = INSN_UID (seq);
653   if (!JUMP_P (branch))
654     /* This can happen for example on the PA; the objective is to know the
655        offset to address something in front of the start of the function.
656        Thus, we can treat it like a backward branch.
657        We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
658        any alignment we'd encounter, so we skip the call to align_fuzz.  */
659     return insn_current_address;
660   dest = JUMP_LABEL (branch);
661 
662   /* BRANCH has no proper alignment chain set, so use SEQ.
663      BRANCH also has no INSN_SHUID.  */
664   if (INSN_SHUID (seq) < INSN_SHUID (dest))
665     {
666       /* Forward branch.  */
667       return (insn_last_address + insn_lengths[seq_uid]
668 	      - align_fuzz (seq, dest, length_unit_log, ~0));
669     }
670   else
671     {
672       /* Backward branch.  */
673       return (insn_current_address
674 	      + align_fuzz (dest, seq, length_unit_log, ~0));
675     }
676 }
677 #endif /* HAVE_ATTR_length */
678 
679 /* Compute branch alignments based on frequency information in the
680    CFG.  */
681 
682 static unsigned int
683 compute_alignments (void)
684 {
685   int log, max_skip, max_log;
686   basic_block bb;
687 
688   if (label_align)
689     {
690       free (label_align);
691       label_align = 0;
692     }
693 
694   max_labelno = max_label_num ();
695   min_labelno = get_first_label_num ();
696   label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
697 
698   /* If not optimizing or optimizing for size, don't assign any alignments.  */
699   if (! optimize || optimize_size)
700     return 0;
701 
702   FOR_EACH_BB (bb)
703     {
704       rtx label = BB_HEAD (bb);
705       int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
706       edge e;
707       edge_iterator ei;
708 
709       if (!LABEL_P (label)
710 	  || probably_never_executed_bb_p (bb))
711 	continue;
712       max_log = LABEL_ALIGN (label);
713       max_skip = LABEL_ALIGN_MAX_SKIP;
714 
715       FOR_EACH_EDGE (e, ei, bb->preds)
716 	{
717 	  if (e->flags & EDGE_FALLTHRU)
718 	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
719 	  else
720 	    branch_frequency += EDGE_FREQUENCY (e);
721 	}
722 
723       /* There are two purposes for aligning a block with no fallthru incoming edge:
724 	 1) to avoid fetch stalls when branch destination is near cache boundary
725 	 2) to improve cache efficiency in case the previous block is not executed
726 	    (so it does not need to be in the cache).
727 
728 	 To catch the first case, we align frequently executed blocks.
729 	 To catch the second, we align blocks that are executed more frequently
730 	 than their predecessor, and whose predecessor is unlikely to be executed
731 	 when the function is called.  */
732 
733       if (!has_fallthru
734 	  && (branch_frequency > BB_FREQ_MAX / 10
735 	      || (bb->frequency > bb->prev_bb->frequency * 10
736 		  && (bb->prev_bb->frequency
737 		      <= ENTRY_BLOCK_PTR->frequency / 2))))
738 	{
739 	  log = JUMP_ALIGN (label);
740 	  if (max_log < log)
741 	    {
742 	      max_log = log;
743 	      max_skip = JUMP_ALIGN_MAX_SKIP;
744 	    }
745 	}
746       /* In case block is frequent and reached mostly by non-fallthru edge,
747 	 align it.  It is most likely a first block of loop.  */
748       if (has_fallthru
749 	  && maybe_hot_bb_p (bb)
750 	  && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
751 	  && branch_frequency > fallthru_frequency * 2)
752 	{
753 	  log = LOOP_ALIGN (label);
754 	  if (max_log < log)
755 	    {
756 	      max_log = log;
757 	      max_skip = LOOP_ALIGN_MAX_SKIP;
758 	    }
759 	}
760       LABEL_TO_ALIGNMENT (label) = max_log;
761       LABEL_TO_MAX_SKIP (label) = max_skip;
762     }
763   return 0;
764 }
765 
766 struct tree_opt_pass pass_compute_alignments =
767 {
768   NULL,                                 /* name */
769   NULL,                                 /* gate */
770   compute_alignments,                   /* execute */
771   NULL,                                 /* sub */
772   NULL,                                 /* next */
773   0,                                    /* static_pass_number */
774   0,                                    /* tv_id */
775   0,                                    /* properties_required */
776   0,                                    /* properties_provided */
777   0,                                    /* properties_destroyed */
778   0,                                    /* todo_flags_start */
779   0,                                    /* todo_flags_finish */
780   0                                     /* letter */
781 };
782 
783 
784 /* Make a pass over all insns and compute their actual lengths by shortening
785    any branches of variable length if possible.  */
786 
787 /* shorten_branches might be called multiple times:  for example, the SH
788    port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
789    In order to do this, it needs proper length information, which it obtains
790    by calling shorten_branches.  This cannot be collapsed with
791    shorten_branches itself into a single pass unless we also want to integrate
792    reorg.c, since the branch splitting exposes new instructions with delay
793    slots.  */
794 
795 void
796 shorten_branches (rtx first ATTRIBUTE_UNUSED)
797 {
798   rtx insn;
799   int max_uid;
800   int i;
801   int max_log;
802   int max_skip;
803 #ifdef HAVE_ATTR_length
804 #define MAX_CODE_ALIGN 16
805   rtx seq;
806   int something_changed = 1;
807   char *varying_length;
808   rtx body;
809   int uid;
810   rtx align_tab[MAX_CODE_ALIGN];
811 
812 #endif
813 
814   /* Compute maximum UID and allocate label_align / uid_shuid.  */
815   max_uid = get_max_uid ();
816 
817   /* Free uid_shuid before reallocating it.  */
818   free (uid_shuid);
819 
820   uid_shuid = XNEWVEC (int, max_uid);
821 
822   if (max_labelno != max_label_num ())
823     {
824       int old = max_labelno;
825       int n_labels;
826       int n_old_labels;
827 
828       max_labelno = max_label_num ();
829 
830       n_labels = max_labelno - min_labelno + 1;
831       n_old_labels = old - min_labelno + 1;
832 
833       label_align = xrealloc (label_align,
834 			      n_labels * sizeof (struct label_alignment));
835 
836       /* Range of labels grows monotonically in the function.  Failing here
837          means that the initialization of the array got lost.  */
838       gcc_assert (n_old_labels <= n_labels);
839 
840       memset (label_align + n_old_labels, 0,
841 	      (n_labels - n_old_labels) * sizeof (struct label_alignment));
842     }
843 
844   /* Initialize label_align and set up uid_shuid to be strictly
845      monotonically rising with insn order.  */
846   /* We use max_log here to keep track of the maximum alignment we want to
847      impose on the next CODE_LABEL (or the current one if we are processing
848      the CODE_LABEL itself).  */
849 
850   max_log = 0;
851   max_skip = 0;
852 
853   for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
854     {
855       int log;
856 
857       INSN_SHUID (insn) = i++;
858       if (INSN_P (insn))
859 	continue;
860 
861       if (LABEL_P (insn))
862 	{
863 	  rtx next;
864 
865 	  /* Merge in alignments computed by compute_alignments.  */
866 	  log = LABEL_TO_ALIGNMENT (insn);
867 	  if (max_log < log)
868 	    {
869 	      max_log = log;
870 	      max_skip = LABEL_TO_MAX_SKIP (insn);
871 	    }
872 
873 	  log = LABEL_ALIGN (insn);
874 	  if (max_log < log)
875 	    {
876 	      max_log = log;
877 	      max_skip = LABEL_ALIGN_MAX_SKIP;
878 	    }
879 	  next = next_nonnote_insn (insn);
880 	  /* ADDR_VECs only take room if read-only data goes into the text
881 	     section.  */
882 	  if (JUMP_TABLES_IN_TEXT_SECTION
883 	      || readonly_data_section == text_section)
884 	    if (next && JUMP_P (next))
885 	      {
886 		rtx nextbody = PATTERN (next);
887 		if (GET_CODE (nextbody) == ADDR_VEC
888 		    || GET_CODE (nextbody) == ADDR_DIFF_VEC)
889 		  {
890 		    log = ADDR_VEC_ALIGN (next);
891 		    if (max_log < log)
892 		      {
893 			max_log = log;
894 			max_skip = LABEL_ALIGN_MAX_SKIP;
895 		      }
896 		  }
897 	      }
898 	  LABEL_TO_ALIGNMENT (insn) = max_log;
899 	  LABEL_TO_MAX_SKIP (insn) = max_skip;
900 	  max_log = 0;
901 	  max_skip = 0;
902 	}
903       else if (BARRIER_P (insn))
904 	{
905 	  rtx label;
906 
907 	  for (label = insn; label && ! INSN_P (label);
908 	       label = NEXT_INSN (label))
909 	    if (LABEL_P (label))
910 	      {
911 		log = LABEL_ALIGN_AFTER_BARRIER (insn);
912 		if (max_log < log)
913 		  {
914 		    max_log = log;
915 		    max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
916 		  }
917 		break;
918 	      }
919 	}
920     }
921 #ifdef HAVE_ATTR_length
922 
923   /* Allocate the rest of the arrays.  */
924   insn_lengths = XNEWVEC (int, max_uid);
925   insn_lengths_max_uid = max_uid;
926   /* Syntax errors can lead to labels being outside of the main insn stream.
927      Initialize insn_addresses, so that we get reproducible results.  */
928   INSN_ADDRESSES_ALLOC (max_uid);
929 
930   varying_length = XCNEWVEC (char, max_uid);
931 
932   /* Initialize uid_align.  We scan instructions
933      from end to start, and keep in align_tab[n] the last seen insn
934      that does an alignment of at least n+1, i.e. the successor
935      in the alignment chain for an insn that does / has a known
936      alignment of n.  */
937   uid_align = XCNEWVEC (rtx, max_uid);
938 
939   for (i = MAX_CODE_ALIGN; --i >= 0;)
940     align_tab[i] = NULL_RTX;
941   seq = get_last_insn ();
942   for (; seq; seq = PREV_INSN (seq))
943     {
944       int uid = INSN_UID (seq);
945       int log;
946       log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
947       uid_align[uid] = align_tab[0];
948       if (log)
949 	{
950 	  /* Found an alignment label.  */
951 	  uid_align[uid] = align_tab[log];
952 	  for (i = log - 1; i >= 0; i--)
953 	    align_tab[i] = seq;
954 	}
955     }
956 #ifdef CASE_VECTOR_SHORTEN_MODE
957   if (optimize)
958     {
959       /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
960          label fields.  */
961 
962       int min_shuid = INSN_SHUID (get_insns ()) - 1;
963       int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
964       int rel;
965 
966       for (insn = first; insn != 0; insn = NEXT_INSN (insn))
967 	{
968 	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
969 	  int len, i, min, max, insn_shuid;
970 	  int min_align;
971 	  addr_diff_vec_flags flags;
972 
973 	  if (!JUMP_P (insn)
974 	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
975 	    continue;
976 	  pat = PATTERN (insn);
977 	  len = XVECLEN (pat, 1);
978 	  gcc_assert (len > 0);
979 	  min_align = MAX_CODE_ALIGN;
980 	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
981 	    {
982 	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
983 	      int shuid = INSN_SHUID (lab);
984 	      if (shuid < min)
985 		{
986 		  min = shuid;
987 		  min_lab = lab;
988 		}
989 	      if (shuid > max)
990 		{
991 		  max = shuid;
992 		  max_lab = lab;
993 		}
994 	      if (min_align > LABEL_TO_ALIGNMENT (lab))
995 		min_align = LABEL_TO_ALIGNMENT (lab);
996 	    }
997 	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
998 	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
999 	  insn_shuid = INSN_SHUID (insn);
1000 	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1001 	  memset (&flags, 0, sizeof (flags));
1002 	  flags.min_align = min_align;
1003 	  flags.base_after_vec = rel > insn_shuid;
1004 	  flags.min_after_vec  = min > insn_shuid;
1005 	  flags.max_after_vec  = max > insn_shuid;
1006 	  flags.min_after_base = min > rel;
1007 	  flags.max_after_base = max > rel;
1008 	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
1009 	}
1010     }
1011 #endif /* CASE_VECTOR_SHORTEN_MODE */
1012 
1013   /* Compute initial lengths, addresses, and varying flags for each insn.  */
1014   for (insn_current_address = 0, insn = first;
1015        insn != 0;
1016        insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1017     {
1018       uid = INSN_UID (insn);
1019 
1020       insn_lengths[uid] = 0;
1021 
1022       if (LABEL_P (insn))
1023 	{
1024 	  int log = LABEL_TO_ALIGNMENT (insn);
1025 	  if (log)
1026 	    {
1027 	      int align = 1 << log;
1028 	      int new_address = (insn_current_address + align - 1) & -align;
1029 	      insn_lengths[uid] = new_address - insn_current_address;
1030 	    }
1031 	}
1032 
1033       INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1034 
1035       if (NOTE_P (insn) || BARRIER_P (insn)
1036 	  || LABEL_P (insn))
1037 	continue;
1038       if (INSN_DELETED_P (insn))
1039 	continue;
1040 
1041       body = PATTERN (insn);
1042       if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1043 	{
1044 	  /* This only takes room if read-only data goes into the text
1045 	     section.  */
1046 	  if (JUMP_TABLES_IN_TEXT_SECTION
1047 	      || readonly_data_section == text_section)
1048 	    insn_lengths[uid] = (XVECLEN (body,
1049 					  GET_CODE (body) == ADDR_DIFF_VEC)
1050 				 * GET_MODE_SIZE (GET_MODE (body)));
1051 	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
1052 	}
1053       else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1054 	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1055       else if (GET_CODE (body) == SEQUENCE)
1056 	{
1057 	  int i;
1058 	  int const_delay_slots;
1059 #ifdef DELAY_SLOTS
1060 	  const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1061 #else
1062 	  const_delay_slots = 0;
1063 #endif
1064 	  /* Inside a delay slot sequence, we do not do any branch shortening
1065 	     if the shortening could change the number of delay slots
1066 	     of the branch.  */
1067 	  for (i = 0; i < XVECLEN (body, 0); i++)
1068 	    {
1069 	      rtx inner_insn = XVECEXP (body, 0, i);
1070 	      int inner_uid = INSN_UID (inner_insn);
1071 	      int inner_length;
1072 
1073 	      if (GET_CODE (body) == ASM_INPUT
1074 		  || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1075 		inner_length = (asm_insn_count (PATTERN (inner_insn))
1076 				* insn_default_length (inner_insn));
1077 	      else
1078 		inner_length = insn_default_length (inner_insn);
1079 
1080 	      insn_lengths[inner_uid] = inner_length;
1081 	      if (const_delay_slots)
1082 		{
1083 		  if ((varying_length[inner_uid]
1084 		       = insn_variable_length_p (inner_insn)) != 0)
1085 		    varying_length[uid] = 1;
1086 		  INSN_ADDRESSES (inner_uid) = (insn_current_address
1087 						+ insn_lengths[uid]);
1088 		}
1089 	      else
1090 		varying_length[inner_uid] = 0;
1091 	      insn_lengths[uid] += inner_length;
1092 	    }
1093 	}
1094       else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1095 	{
1096 	  insn_lengths[uid] = insn_default_length (insn);
1097 	  varying_length[uid] = insn_variable_length_p (insn);
1098 	}
1099 
1100       /* If needed, do any adjustment.  */
1101 #ifdef ADJUST_INSN_LENGTH
1102       ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1103       if (insn_lengths[uid] < 0)
1104 	fatal_insn ("negative insn length", insn);
1105 #endif
1106     }
1107 
1108   /* Now loop over all the insns finding varying length insns.  For each,
1109      get the current insn length.  If it has changed, reflect the change.
1110      When nothing changes for a full pass, we are done.  */
1111 
1112   while (something_changed)
1113     {
1114       something_changed = 0;
1115       insn_current_align = MAX_CODE_ALIGN - 1;
1116       for (insn_current_address = 0, insn = first;
1117 	   insn != 0;
1118 	   insn = NEXT_INSN (insn))
1119 	{
1120 	  int new_length;
1121 #ifdef ADJUST_INSN_LENGTH
1122 	  int tmp_length;
1123 #endif
1124 	  int length_align;
1125 
1126 	  uid = INSN_UID (insn);
1127 
1128 	  if (LABEL_P (insn))
1129 	    {
1130 	      int log = LABEL_TO_ALIGNMENT (insn);
1131 	      if (log > insn_current_align)
1132 		{
1133 		  int align = 1 << log;
1134 		  int new_address= (insn_current_address + align - 1) & -align;
1135 		  insn_lengths[uid] = new_address - insn_current_address;
1136 		  insn_current_align = log;
1137 		  insn_current_address = new_address;
1138 		}
1139 	      else
1140 		insn_lengths[uid] = 0;
1141 	      INSN_ADDRESSES (uid) = insn_current_address;
1142 	      continue;
1143 	    }
1144 
1145 	  length_align = INSN_LENGTH_ALIGNMENT (insn);
1146 	  if (length_align < insn_current_align)
1147 	    insn_current_align = length_align;
1148 
1149 	  insn_last_address = INSN_ADDRESSES (uid);
1150 	  INSN_ADDRESSES (uid) = insn_current_address;
1151 
1152 #ifdef CASE_VECTOR_SHORTEN_MODE
1153 	  if (optimize && JUMP_P (insn)
1154 	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1155 	    {
1156 	      rtx body = PATTERN (insn);
1157 	      int old_length = insn_lengths[uid];
1158 	      rtx rel_lab = XEXP (XEXP (body, 0), 0);
1159 	      rtx min_lab = XEXP (XEXP (body, 2), 0);
1160 	      rtx max_lab = XEXP (XEXP (body, 3), 0);
1161 	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1162 	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1163 	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1164 	      rtx prev;
1165 	      int rel_align = 0;
1166 	      addr_diff_vec_flags flags;
1167 
1168 	      /* Avoid automatic aggregate initialization.  */
1169 	      flags = ADDR_DIFF_VEC_FLAGS (body);
1170 
1171 	      /* Try to find a known alignment for rel_lab.  */
1172 	      for (prev = rel_lab;
1173 		   prev
1174 		   && ! insn_lengths[INSN_UID (prev)]
1175 		   && ! (varying_length[INSN_UID (prev)] & 1);
1176 		   prev = PREV_INSN (prev))
1177 		if (varying_length[INSN_UID (prev)] & 2)
1178 		  {
1179 		    rel_align = LABEL_TO_ALIGNMENT (prev);
1180 		    break;
1181 		  }
1182 
1183 	      /* See the comment on addr_diff_vec_flags in rtl.h for the
1184 		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
1185 	      /* Anything after INSN still has addresses from the last
1186 		 pass; adjust these so that they reflect our current
1187 		 estimate for this pass.  */
1188 	      if (flags.base_after_vec)
1189 		rel_addr += insn_current_address - insn_last_address;
1190 	      if (flags.min_after_vec)
1191 		min_addr += insn_current_address - insn_last_address;
1192 	      if (flags.max_after_vec)
1193 		max_addr += insn_current_address - insn_last_address;
1194 	      /* We want to know the worst case, i.e. lowest possible value
1195 		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
1196 		 its offset is positive, and we have to be wary of code shrink;
1197 		 otherwise, it is negative, and we have to be wary of code
1198 		 size increase.  */
1199 	      if (flags.min_after_base)
1200 		{
1201 		  /* If INSN is between REL_LAB and MIN_LAB, the size
1202 		     changes we are about to make can change the alignment
1203 		     within the observed offset, therefore we have to break
1204 		     it up into two parts that are independent.  */
1205 		  if (! flags.base_after_vec && flags.min_after_vec)
1206 		    {
1207 		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1208 		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
1209 		    }
1210 		  else
1211 		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1212 		}
1213 	      else
1214 		{
1215 		  if (flags.base_after_vec && ! flags.min_after_vec)
1216 		    {
1217 		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1218 		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1219 		    }
1220 		  else
1221 		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1222 		}
1223 	      /* Likewise, determine the worst case, i.e. highest possible
1224 		 value for the offset of MAX_LAB.  */
1225 	      if (flags.max_after_base)
1226 		{
1227 		  if (! flags.base_after_vec && flags.max_after_vec)
1228 		    {
1229 		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1230 		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
1231 		    }
1232 		  else
1233 		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1234 		}
1235 	      else
1236 		{
1237 		  if (flags.base_after_vec && ! flags.max_after_vec)
1238 		    {
1239 		      max_addr += align_fuzz (max_lab, insn, 0, 0);
1240 		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
1241 		    }
1242 		  else
1243 		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1244 		}
1245 	      PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1246 							max_addr - rel_addr,
1247 							body));
1248 	      if (JUMP_TABLES_IN_TEXT_SECTION
1249 		  || readonly_data_section == text_section)
1250 		{
1251 		  insn_lengths[uid]
1252 		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1253 		  insn_current_address += insn_lengths[uid];
1254 		  if (insn_lengths[uid] != old_length)
1255 		    something_changed = 1;
1256 		}
1257 
1258 	      continue;
1259 	    }
1260 #endif /* CASE_VECTOR_SHORTEN_MODE */
1261 
1262 	  if (! (varying_length[uid]))
1263 	    {
1264 	      if (NONJUMP_INSN_P (insn)
1265 		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
1266 		{
1267 		  int i;
1268 
1269 		  body = PATTERN (insn);
1270 		  for (i = 0; i < XVECLEN (body, 0); i++)
1271 		    {
1272 		      rtx inner_insn = XVECEXP (body, 0, i);
1273 		      int inner_uid = INSN_UID (inner_insn);
1274 
1275 		      INSN_ADDRESSES (inner_uid) = insn_current_address;
1276 
1277 		      insn_current_address += insn_lengths[inner_uid];
1278 		    }
1279 		}
1280 	      else
1281 		insn_current_address += insn_lengths[uid];
1282 
1283 	      continue;
1284 	    }
1285 
1286 	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1287 	    {
1288 	      int i;
1289 
1290 	      body = PATTERN (insn);
1291 	      new_length = 0;
1292 	      for (i = 0; i < XVECLEN (body, 0); i++)
1293 		{
1294 		  rtx inner_insn = XVECEXP (body, 0, i);
1295 		  int inner_uid = INSN_UID (inner_insn);
1296 		  int inner_length;
1297 
1298 		  INSN_ADDRESSES (inner_uid) = insn_current_address;
1299 
1300 		  /* insn_current_length returns 0 for insns with a
1301 		     non-varying length.  */
1302 		  if (! varying_length[inner_uid])
1303 		    inner_length = insn_lengths[inner_uid];
1304 		  else
1305 		    inner_length = insn_current_length (inner_insn);
1306 
1307 		  if (inner_length != insn_lengths[inner_uid])
1308 		    {
1309 		      insn_lengths[inner_uid] = inner_length;
1310 		      something_changed = 1;
1311 		    }
1312 		  insn_current_address += insn_lengths[inner_uid];
1313 		  new_length += inner_length;
1314 		}
1315 	    }
1316 	  else
1317 	    {
1318 	      new_length = insn_current_length (insn);
1319 	      insn_current_address += new_length;
1320 	    }
1321 
1322 #ifdef ADJUST_INSN_LENGTH
1323 	  /* If needed, do any adjustment.  */
1324 	  tmp_length = new_length;
1325 	  ADJUST_INSN_LENGTH (insn, new_length);
1326 	  insn_current_address += (new_length - tmp_length);
1327 #endif
1328 
1329 	  if (new_length != insn_lengths[uid])
1330 	    {
1331 	      insn_lengths[uid] = new_length;
1332 	      something_changed = 1;
1333 	    }
1334 	}
1335       /* For a non-optimizing compile, do only a single pass.  */
1336       if (!optimize)
1337 	break;
1338     }
1339 
1340   free (varying_length);
1341 
1342 #endif /* HAVE_ATTR_length */
1343 }
1344 
1345 #ifdef HAVE_ATTR_length
1346 /* Given the body of an INSN known to be generated by an ASM statement, return
1347    the number of machine instructions likely to be generated for this insn.
1348    This is used to compute its length.  */
1349 
1350 static int
1351 asm_insn_count (rtx body)
1352 {
1353   const char *template;
1354   int count = 1;
1355 
1356   if (GET_CODE (body) == ASM_INPUT)
1357     template = XSTR (body, 0);
1358   else
1359     template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1360 
1361   for (; *template; template++)
1362     if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1363       count++;
1364 
1365   return count;
1366 }
1367 #endif
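/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR above (';'),
   an asm template of "nop; nop\n\tnop" gives a count of 3: one for the
   string itself plus one for the ';' and one for the '\n'.  */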
1368 
1369 /* Output assembler code for the start of a function,
1370    and initialize some of the variables in this file
1371    for the new function.  The label for the function and associated
1372    assembler pseudo-ops have already been output in `assemble_start_function'.
1373 
1374    FIRST is the first insn of the rtl for the function being compiled.
1375    FILE is the file to write assembler code to.
1376    OPTIMIZE is nonzero if we should eliminate redundant
1377      test and compare insns.  */
1378 
1379 void
1380 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1381 		      int optimize ATTRIBUTE_UNUSED)
1382 {
1383   block_depth = 0;
1384 
1385   this_is_asm_operands = 0;
1386 
1387   last_filename = locator_file (prologue_locator);
1388   last_linenum = locator_line (prologue_locator);
1389 
1390   high_block_linenum = high_function_linenum = last_linenum;
1391 
1392   (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1393 
1394 #if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1395   if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1396     dwarf2out_begin_prologue (0, NULL);
1397 #endif
1398 
1399 #ifdef LEAF_REG_REMAP
1400   if (current_function_uses_only_leaf_regs)
1401     leaf_renumber_regs (first);
1402 #endif
1403 
1404   /* The Sun386i and perhaps other machines don't work right
1405      if the profiling code comes after the prologue.  */
1406 #ifdef PROFILE_BEFORE_PROLOGUE
1407   if (current_function_profile)
1408     profile_function (file);
1409 #endif /* PROFILE_BEFORE_PROLOGUE */
1410 
1411 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1412   if (dwarf2out_do_frame ())
1413     dwarf2out_frame_debug (NULL_RTX, false);
1414 #endif
1415 
1416   /* If debugging, assign block numbers to all of the blocks in this
1417      function.  */
1418   if (write_symbols)
1419     {
1420       reemit_insn_block_notes ();
1421       number_blocks (current_function_decl);
1422       /* We never actually put out begin/end notes for the top-level
1423 	 block in the function.  But, conceptually, that block is
1424 	 always needed.  */
1425       TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1426     }
1427 
1428   /* First output the function prologue: code to set up the stack frame.  */
1429   targetm.asm_out.function_prologue (file, get_frame_size ());
1430 
1431   /* If the machine represents the prologue as RTL, the profiling code must
1432      be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
1433 #ifdef HAVE_prologue
1434   if (! HAVE_prologue)
1435 #endif
1436     profile_after_prologue (file);
1437 }
1438 
1439 static void
1440 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1441 {
1442 #ifndef PROFILE_BEFORE_PROLOGUE
1443   if (current_function_profile)
1444     profile_function (file);
1445 #endif /* not PROFILE_BEFORE_PROLOGUE */
1446 }
1447 
1448 static void
1449 profile_function (FILE *file ATTRIBUTE_UNUSED)
1450 {
1451 #ifndef NO_PROFILE_COUNTERS
1452 # define NO_PROFILE_COUNTERS	0
1453 #endif
1454 #if defined(ASM_OUTPUT_REG_PUSH)
1455   int sval = current_function_returns_struct;
1456   rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1457 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1458   int cxt = cfun->static_chain_decl != NULL;
1459 #endif
1460 #endif /* ASM_OUTPUT_REG_PUSH */
1461 
1462   if (! NO_PROFILE_COUNTERS)
1463     {
1464       int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1465       switch_to_section (data_section);
1466       ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1467       targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1468       assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1469     }
1470 
1471   switch_to_section (current_function_section ());
1472 
1473 #if defined(ASM_OUTPUT_REG_PUSH)
1474   if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1475     ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1476 #endif
1477 
1478 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1479   if (cxt)
1480     ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1481 #else
1482 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1483   if (cxt)
1484     {
1485       ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1486     }
1487 #endif
1488 #endif
1489 
1490   FUNCTION_PROFILER (file, current_function_funcdef_no);
1491 
1492 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1493   if (cxt)
1494     ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1495 #else
1496 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1497   if (cxt)
1498     {
1499       ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1500     }
1501 #endif
1502 #endif
1503 
1504 #if defined(ASM_OUTPUT_REG_PUSH)
1505   if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1506     ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1507 #endif
1508 }
1509 
1510 /* Output assembler code for the end of a function.
1511    For clarity, args are same as those of `final_start_function'
1512    even though not all of them are needed.  */
1513 
1514 void
1515 final_end_function (void)
1516 {
1517   app_disable ();
1518 
1519   (*debug_hooks->end_function) (high_function_linenum);
1520 
1521   /* Finally, output the function epilogue:
1522      code to restore the stack frame and return to the caller.  */
1523   targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1524 
1525   /* And debug output.  */
1526   (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1527 
1528 #if defined (DWARF2_UNWIND_INFO)
1529   if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1530       && dwarf2out_do_frame ())
1531     dwarf2out_end_epilogue (last_linenum, last_filename);
1532 #endif
1533 }
1534 
1535 /* Output assembler code for some insns: all or part of a function.
1536    For description of args, see `final_start_function', above.  */
1537 
1538 void
1539 final (rtx first, FILE *file, int optimize)
1540 {
1541   rtx insn;
1542   int max_uid = 0;
1543   int seen = 0;
1544 
1545   last_ignored_compare = 0;
1546 
1547 #ifdef SDB_DEBUGGING_INFO
1548   /* When producing SDB debugging info, delete troublesome line number
1549      notes from inlined functions in other files as well as duplicate
1550      line number notes.  */
1551   if (write_symbols == SDB_DEBUG)
1552     {
1553       rtx last = 0;
1554       for (insn = first; insn; insn = NEXT_INSN (insn))
1555 	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
1556 	  {
1557 	    if (last != 0
1558 #ifdef USE_MAPPED_LOCATION
1559 		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last)
1560 #else
1561 		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1562 		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)
1563 #endif
1564 	      )
1565 	      {
1566 		delete_insn (insn);	/* Use delete_note.  */
1567 		continue;
1568 	      }
1569 	    last = insn;
1570 	  }
1571     }
1572 #endif
1573 
1574   for (insn = first; insn; insn = NEXT_INSN (insn))
1575     {
1576       if (INSN_UID (insn) > max_uid)       /* Find largest UID.  */
1577 	max_uid = INSN_UID (insn);
1578 #ifdef HAVE_cc0
1579       /* If CC tracking across branches is enabled, record the insn which
1580 	 jumps to each branch only reached from one place.  */
1581       if (optimize && JUMP_P (insn))
1582 	{
1583 	  rtx lab = JUMP_LABEL (insn);
1584 	  if (lab && LABEL_NUSES (lab) == 1)
1585 	    {
1586 	      LABEL_REFS (lab) = insn;
1587 	    }
1588 	}
1589 #endif
1590     }
1591 
1592   init_recog ();
1593 
1594   CC_STATUS_INIT;
1595 
1596   /* Output the insns.  */
1597   for (insn = NEXT_INSN (first); insn;)
1598     {
1599 #ifdef HAVE_ATTR_length
1600       if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1601 	{
1602 	  /* This can be triggered by bugs elsewhere in the compiler if
1603 	     new insns are created after init_insn_lengths is called.  */
1604 	  gcc_assert (NOTE_P (insn));
1605 	  insn_current_address = -1;
1606 	}
1607       else
1608 	insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1609 #endif /* HAVE_ATTR_length */
1610 
1611       insn = final_scan_insn (insn, file, optimize, 0, &seen);
1612     }
1613 }
1614 
1615 const char *
1616 get_insn_template (int code, rtx insn)
1617 {
1618   switch (insn_data[code].output_format)
1619     {
1620     case INSN_OUTPUT_FORMAT_SINGLE:
1621       return insn_data[code].output.single;
1622     case INSN_OUTPUT_FORMAT_MULTI:
1623       return insn_data[code].output.multi[which_alternative];
1624     case INSN_OUTPUT_FORMAT_FUNCTION:
1625       gcc_assert (insn);
1626       return (*insn_data[code].output.function) (recog_data.operand, insn);
1627 
1628     default:
1629       gcc_unreachable ();
1630     }
1631 }
1632 
1633 /* Emit the appropriate declaration for an alternate-entry-point
1634    symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
1635    LABEL_KIND != LABEL_NORMAL.
1636 
1637    The case fall-through in this function is intentional.  */
1638 static void
1639 output_alternate_entry_point (FILE *file, rtx insn)
1640 {
1641   const char *name = LABEL_NAME (insn);
1642 
1643   switch (LABEL_KIND (insn))
1644     {
1645     case LABEL_WEAK_ENTRY:
1646 #ifdef ASM_WEAKEN_LABEL
1647       ASM_WEAKEN_LABEL (file, name);
1648 #endif
1649     case LABEL_GLOBAL_ENTRY:
1650       targetm.asm_out.globalize_label (file, name);
1651     case LABEL_STATIC_ENTRY:
1652 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1653       ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1654 #endif
1655       ASM_OUTPUT_LABEL (file, name);
1656       break;
1657 
1658     case LABEL_NORMAL:
1659     default:
1660       gcc_unreachable ();
1661     }
1662 }
1663 
1664 /* The final scan for one insn, INSN.
1665    Args are same as in `final', except that INSN
1666    is the insn being scanned.
1667    Value returned is the next insn to be scanned.
1668 
1669    NOPEEPHOLES is the flag to disallow peephole processing (currently
1670    used for within delayed branch sequence output).
1671 
1672    SEEN is used to track the end of the prologue, for emitting
1673    debug information.  We force the emission of a line note after
1674    both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1675    at the beginning of the second basic block, whichever comes
1676    first.  */
1677 
1678 rtx
1679 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1680 		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1681 {
1682 #ifdef HAVE_cc0
1683   rtx set;
1684 #endif
1685   rtx next;
1686 
1687   insn_counter++;
1688 
1689   /* Ignore deleted insns.  These can occur when we split insns (due to a
1690      template of "#") while not optimizing.  */
1691   if (INSN_DELETED_P (insn))
1692     return NEXT_INSN (insn);
1693 
1694   switch (GET_CODE (insn))
1695     {
1696     case NOTE:
1697       switch (NOTE_LINE_NUMBER (insn))
1698 	{
1699 	case NOTE_INSN_DELETED:
1700 	case NOTE_INSN_FUNCTION_END:
1701 	case NOTE_INSN_REPEATED_LINE_NUMBER:
1702 	case NOTE_INSN_EXPECTED_VALUE:
1703 	  break;
1704 
1705 	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1706 	  in_cold_section_p = !in_cold_section_p;
1707 	  (*debug_hooks->switch_text_section) ();
1708 	  switch_to_section (current_function_section ());
1709 	  break;
1710 
1711 	case NOTE_INSN_BASIC_BLOCK:
1712 #ifdef TARGET_UNWIND_INFO
1713 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
1714 #endif
1715 
1716 	  if (flag_debug_asm)
1717 	    fprintf (asm_out_file, "\t%s basic block %d\n",
1718 		     ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1719 
1720 	  if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1721 	    {
1722 	      *seen |= SEEN_EMITTED;
1723 	      force_source_line = true;
1724 	    }
1725 	  else
1726 	    *seen |= SEEN_BB;
1727 
1728 	  break;
1729 
1730 	case NOTE_INSN_EH_REGION_BEG:
1731 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1732 				  NOTE_EH_HANDLER (insn));
1733 	  break;
1734 
1735 	case NOTE_INSN_EH_REGION_END:
1736 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1737 				  NOTE_EH_HANDLER (insn));
1738 	  break;
1739 
1740 	case NOTE_INSN_PROLOGUE_END:
1741 	  targetm.asm_out.function_end_prologue (file);
1742 	  profile_after_prologue (file);
1743 
1744 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1745 	    {
1746 	      *seen |= SEEN_EMITTED;
1747 	      force_source_line = true;
1748 	    }
1749 	  else
1750 	    *seen |= SEEN_NOTE;
1751 
1752 	  break;
1753 
1754 	case NOTE_INSN_EPILOGUE_BEG:
1755 	  targetm.asm_out.function_begin_epilogue (file);
1756 	  break;
1757 
1758 	case NOTE_INSN_FUNCTION_BEG:
1759 	  app_disable ();
1760 	  (*debug_hooks->end_prologue) (last_linenum, last_filename);
1761 
1762 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1763 	    {
1764 	      *seen |= SEEN_EMITTED;
1765 	      force_source_line = true;
1766 	    }
1767 	  else
1768 	    *seen |= SEEN_NOTE;
1769 
1770 	  break;
1771 
1772 	case NOTE_INSN_BLOCK_BEG:
1773 	  if (debug_info_level == DINFO_LEVEL_NORMAL
1774 	      || debug_info_level == DINFO_LEVEL_VERBOSE
1775 	      || write_symbols == DWARF2_DEBUG
1776 	      || write_symbols == VMS_AND_DWARF2_DEBUG
1777 	      || write_symbols == VMS_DEBUG)
1778 	    {
1779 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1780 
1781 	      app_disable ();
1782 	      ++block_depth;
1783 	      high_block_linenum = last_linenum;
1784 
1785 	      /* Output debugging info about the symbol-block beginning.  */
1786 	      (*debug_hooks->begin_block) (last_linenum, n);
1787 
1788 	      /* Mark this block as output.  */
1789 	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1790 	    }
1791 	  break;
1792 
1793 	case NOTE_INSN_BLOCK_END:
1794 	  if (debug_info_level == DINFO_LEVEL_NORMAL
1795 	      || debug_info_level == DINFO_LEVEL_VERBOSE
1796 	      || write_symbols == DWARF2_DEBUG
1797 	      || write_symbols == VMS_AND_DWARF2_DEBUG
1798 	      || write_symbols == VMS_DEBUG)
1799 	    {
1800 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1801 
1802 	      app_disable ();
1803 
1804 	      /* End of a symbol-block.  */
1805 	      --block_depth;
1806 	      gcc_assert (block_depth >= 0);
1807 
1808 	      (*debug_hooks->end_block) (high_block_linenum, n);
1809 	    }
1810 	  break;
1811 
1812 	case NOTE_INSN_DELETED_LABEL:
1813 	  /* Emit the label.  We may have deleted the CODE_LABEL because
1814 	     the label could be proved to be unreachable, though it is still
1815 	     referenced (in the form of having its address taken).  */
1816 	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1817 	  break;
1818 
1819 	case NOTE_INSN_VAR_LOCATION:
1820 	  (*debug_hooks->var_location) (insn);
1821 	  break;
1822 
1823 	case 0:
1824 	  break;
1825 
1826 	default:
1827 	  gcc_assert (NOTE_LINE_NUMBER (insn) > 0);
1828 	  break;
1829 	}
1830       break;
1831 
1832     case BARRIER:
1833 #if defined (DWARF2_UNWIND_INFO)
1834       if (dwarf2out_do_frame ())
1835 	dwarf2out_frame_debug (insn, false);
1836 #endif
1837       break;
1838 
1839     case CODE_LABEL:
1840       /* The target port might emit labels in the output function for
1841 	 some insn, e.g. sh.c output_branchy_insn.  */
1842       if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1843 	{
1844 	  int align = LABEL_TO_ALIGNMENT (insn);
1845 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1846 	  int max_skip = LABEL_TO_MAX_SKIP (insn);
1847 #endif
1848 
1849 	  if (align && NEXT_INSN (insn))
1850 	    {
1851 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1852 	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1853 #else
1854 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1855               ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1856 #else
1857 	      ASM_OUTPUT_ALIGN (file, align);
1858 #endif
1859 #endif
1860 	    }
1861 	}
1862 #ifdef HAVE_cc0
1863       CC_STATUS_INIT;
1864       /* If this label is reached from only one place, set the condition
1865 	 codes from the instruction just before the branch.  */
1866 
1867       /* Disabled because some insns set cc_status in the C output code
1868 	 and NOTICE_UPDATE_CC alone can set incorrect status.  */
1869       if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1870 	{
1871 	  rtx jump = LABEL_REFS (insn);
1872 	  rtx barrier = prev_nonnote_insn (insn);
1873 	  rtx prev;
1874 	  /* If the LABEL_REFS field of this label has been set to point
1875 	     at a branch, the predecessor of the branch is a regular
1876 	     insn, and that branch is the only way to reach this label,
1877 	     set the condition codes based on the branch and its
1878 	     predecessor.  */
1879 	  if (barrier && BARRIER_P (barrier)
1880 	      && jump && JUMP_P (jump)
1881 	      && (prev = prev_nonnote_insn (jump))
1882 	      && NONJUMP_INSN_P (prev))
1883 	    {
1884 	      NOTICE_UPDATE_CC (PATTERN (prev), prev);
1885 	      NOTICE_UPDATE_CC (PATTERN (jump), jump);
1886 	    }
1887 	}
1888 #endif
1889 
1890       if (LABEL_NAME (insn))
1891 	(*debug_hooks->label) (insn);
1892 
1893       if (app_on)
1894 	{
1895 	  fputs (ASM_APP_OFF, file);
1896 	  app_on = 0;
1897 	}
1898 
1899       next = next_nonnote_insn (insn);
1900       if (next != 0 && JUMP_P (next))
1901 	{
1902 	  rtx nextbody = PATTERN (next);
1903 
1904 	  /* If this label is followed by a jump-table,
1905 	     make sure we put the label in the read-only section.  Also
1906 	     possibly write the label and jump table together.  */
1907 
1908 	  if (GET_CODE (nextbody) == ADDR_VEC
1909 	      || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1910 	    {
1911 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1912 	      /* In this case, the case vector is being moved by the
1913 		 target, so don't output the label at all.  Leave that
1914 		 to the back end macros.  */
1915 #else
1916 	      if (! JUMP_TABLES_IN_TEXT_SECTION)
1917 		{
1918 		  int log_align;
1919 
1920 		  switch_to_section (targetm.asm_out.function_rodata_section
1921 				     (current_function_decl));
1922 
1923 #ifdef ADDR_VEC_ALIGN
1924 		  log_align = ADDR_VEC_ALIGN (next);
1925 #else
1926 		  log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1927 #endif
1928 		  ASM_OUTPUT_ALIGN (file, log_align);
1929 		}
1930 	      else
1931 		switch_to_section (current_function_section ());
1932 
1933 #ifdef ASM_OUTPUT_CASE_LABEL
1934 	      ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1935 				     next);
1936 #else
1937 	      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1938 #endif
1939 #endif
1940 	      break;
1941 	    }
1942 	}
1943       if (LABEL_ALT_ENTRY_P (insn))
1944 	output_alternate_entry_point (file, insn);
1945       else
1946 	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1947       break;
1948 
1949     default:
1950       {
1951 	rtx body = PATTERN (insn);
1952 	int insn_code_number;
1953 	const char *template;
1954 
1955 #ifdef HAVE_conditional_execution
1956 	/* Reset this early so it is correct for ASM statements.  */
1957 	current_insn_predicate = NULL_RTX;
1958 #endif
1959 	/* An INSN, JUMP_INSN or CALL_INSN.
1960 	   First check for special kinds that recog doesn't recognize.  */
1961 
1962 	if (GET_CODE (body) == USE /* These are just declarations.  */
1963 	    || GET_CODE (body) == CLOBBER)
1964 	  break;
1965 
1966 #ifdef HAVE_cc0
1967 	{
1968 	  /* If there is a REG_CC_SETTER note on this insn, it means that
1969 	     the setting of the condition code was done in the delay slot
1970 	     of the insn that branched here.  So recover the cc status
1971 	     from the insn that set it.  */
1972 
1973 	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1974 	  if (note)
1975 	    {
1976 	      NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1977 	      cc_prev_status = cc_status;
1978 	    }
1979 	}
1980 #endif
1981 
1982 	/* Detect insns that are really jump-tables
1983 	   and output them as such.  */
1984 
1985 	if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1986 	  {
1987 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1988 	    int vlen, idx;
1989 #endif
1990 
1991 	    if (! JUMP_TABLES_IN_TEXT_SECTION)
1992 	      switch_to_section (targetm.asm_out.function_rodata_section
1993 				 (current_function_decl));
1994 	    else
1995 	      switch_to_section (current_function_section ());
1996 
1997 	    if (app_on)
1998 	      {
1999 		fputs (ASM_APP_OFF, file);
2000 		app_on = 0;
2001 	      }
2002 
2003 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2004 	    if (GET_CODE (body) == ADDR_VEC)
2005 	      {
2006 #ifdef ASM_OUTPUT_ADDR_VEC
2007 		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2008 #else
2009 		gcc_unreachable ();
2010 #endif
2011 	      }
2012 	    else
2013 	      {
2014 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2015 		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2016 #else
2017 		gcc_unreachable ();
2018 #endif
2019 	      }
2020 #else
2021 	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2022 	    for (idx = 0; idx < vlen; idx++)
2023 	      {
2024 		if (GET_CODE (body) == ADDR_VEC)
2025 		  {
2026 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2027 		    ASM_OUTPUT_ADDR_VEC_ELT
2028 		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2029 #else
2030 		    gcc_unreachable ();
2031 #endif
2032 		  }
2033 		else
2034 		  {
2035 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2036 		    ASM_OUTPUT_ADDR_DIFF_ELT
2037 		      (file,
2038 		       body,
2039 		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2040 		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2041 #else
2042 		    gcc_unreachable ();
2043 #endif
2044 		  }
2045 	      }
2046 #ifdef ASM_OUTPUT_CASE_END
2047 	    ASM_OUTPUT_CASE_END (file,
2048 				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2049 				 insn);
2050 #endif
2051 #endif
2052 
2053 	    switch_to_section (current_function_section ());
2054 
2055 	    break;
2056 	  }
2057 	/* Output this line note if it is the first or the last line
2058 	   note in a row.  */
2059 	if (notice_source_line (insn))
2060 	  {
2061 	    (*debug_hooks->source_line) (last_linenum, last_filename);
2062 	  }
2063 
2064 	if (GET_CODE (body) == ASM_INPUT)
2065 	  {
2066 	    const char *string = XSTR (body, 0);
2067 
2068 	    /* There's no telling what that did to the condition codes.  */
2069 	    CC_STATUS_INIT;
2070 
2071 	    if (string[0])
2072 	      {
2073 		if (! app_on)
2074 		  {
2075 		    fputs (ASM_APP_ON, file);
2076 		    app_on = 1;
2077 		  }
2078 		fprintf (asm_out_file, "\t%s\n", string);
2079 	      }
2080 	    break;
2081 	  }
2082 
2083 	/* Detect `asm' construct with operands.  */
2084 	if (asm_noperands (body) >= 0)
2085 	  {
2086 	    unsigned int noperands = asm_noperands (body);
2087 	    rtx *ops = alloca (noperands * sizeof (rtx));
2088 	    const char *string;
2089 
2090 	    /* There's no telling what that did to the condition codes.  */
2091 	    CC_STATUS_INIT;
2092 
2093 	    /* Get out the operand values.  */
2094 	    string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2095 	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2096 	    insn_noperands = noperands;
2097 	    this_is_asm_operands = insn;
2098 
2099 #ifdef FINAL_PRESCAN_INSN
2100 	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2101 #endif
2102 
2103 	    /* Output the insn using them.  */
2104 	    if (string[0])
2105 	      {
2106 		if (! app_on)
2107 		  {
2108 		    fputs (ASM_APP_ON, file);
2109 		    app_on = 1;
2110 		  }
2111 	        output_asm_insn (string, ops);
2112 	      }
2113 
2114 	    this_is_asm_operands = 0;
2115 	    break;
2116 	  }
2117 
2118 	if (app_on)
2119 	  {
2120 	    fputs (ASM_APP_OFF, file);
2121 	    app_on = 0;
2122 	  }
2123 
2124 	if (GET_CODE (body) == SEQUENCE)
2125 	  {
2126 	    /* A delayed-branch sequence.  */
2127 	    int i;
2128 
2129 	    final_sequence = body;
2130 
2131 	    /* Record the delay slots' frame information before the branch.
2132 	       This is needed for delayed calls: see execute_cfa_program().  */
2133 #if defined (DWARF2_UNWIND_INFO)
2134 	    if (dwarf2out_do_frame ())
2135 	      for (i = 1; i < XVECLEN (body, 0); i++)
2136 		dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2137 #endif
2138 
2139 	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2140 	       force the restoration of a comparison that was previously
2141 	       thought unnecessary.  If that happens, cancel this sequence
2142 	       and cause that insn to be restored.  */
2143 
2144 	    next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2145 	    if (next != XVECEXP (body, 0, 1))
2146 	      {
2147 		final_sequence = 0;
2148 		return next;
2149 	      }
2150 
2151 	    for (i = 1; i < XVECLEN (body, 0); i++)
2152 	      {
2153 		rtx insn = XVECEXP (body, 0, i);
2154 		rtx next = NEXT_INSN (insn);
2155 		/* We loop in case any instruction in a delay slot gets
2156 		   split.  */
2157 		do
2158 		  insn = final_scan_insn (insn, file, 0, 1, seen);
2159 		while (insn != next);
2160 	      }
2161 #ifdef DBR_OUTPUT_SEQEND
2162 	    DBR_OUTPUT_SEQEND (file);
2163 #endif
2164 	    final_sequence = 0;
2165 
2166 	    /* If the insn requiring the delay slot was a CALL_INSN, the
2167 	       insns in the delay slot are actually executed before the
2168 	       called function.  Hence we don't preserve any CC-setting
2169 	       actions in these insns and the CC must be marked as being
2170 	       clobbered by the function.  */
2171 	    if (CALL_P (XVECEXP (body, 0, 0)))
2172 	      {
2173 		CC_STATUS_INIT;
2174 	      }
2175 	    break;
2176 	  }
2177 
2178 	/* We have a real machine instruction as rtl.  */
2179 
2180 	body = PATTERN (insn);
2181 
2182 #ifdef HAVE_cc0
2183 	set = single_set (insn);
2184 
2185 	/* Check for redundant test and compare instructions
2186 	   (when the condition codes are already set up as desired).
2187 	   This is done only when optimizing; if not optimizing,
2188 	   it should be possible for the user to alter a variable
2189 	   with the debugger in between statements
2190 	   and the next statement should reexamine the variable
2191 	   to compute the condition codes.  */
2192 
2193 	if (optimize)
2194 	  {
2195 	    if (set
2196 		&& GET_CODE (SET_DEST (set)) == CC0
2197 		&& insn != last_ignored_compare)
2198 	      {
2199 		if (GET_CODE (SET_SRC (set)) == SUBREG)
2200 		  SET_SRC (set) = alter_subreg (&SET_SRC (set));
2201 		else if (GET_CODE (SET_SRC (set)) == COMPARE)
2202 		  {
2203 		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2204 		      XEXP (SET_SRC (set), 0)
2205 			= alter_subreg (&XEXP (SET_SRC (set), 0));
2206 		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2207 		      XEXP (SET_SRC (set), 1)
2208 			= alter_subreg (&XEXP (SET_SRC (set), 1));
2209 		  }
2210 		if ((cc_status.value1 != 0
2211 		     && rtx_equal_p (SET_SRC (set), cc_status.value1))
2212 		    || (cc_status.value2 != 0
2213 			&& rtx_equal_p (SET_SRC (set), cc_status.value2)))
2214 		  {
2215 		    /* Don't delete insn if it has an addressing side-effect.  */
2216 		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2217 			/* or if anything in it is volatile.  */
2218 			&& ! volatile_refs_p (PATTERN (insn)))
2219 		      {
2220 			/* We don't really delete the insn; just ignore it.  */
2221 			last_ignored_compare = insn;
2222 			break;
2223 		      }
2224 		  }
2225 	      }
2226 	  }
2227 #endif
2228 
2229 #ifdef HAVE_cc0
2230 	/* If this is a conditional branch, maybe modify it
2231 	   if the cc's are in a nonstandard state
2232 	   so that it accomplishes the same thing that it would
2233 	   do straightforwardly if the cc's were set up normally.  */
2234 
2235 	if (cc_status.flags != 0
2236 	    && JUMP_P (insn)
2237 	    && GET_CODE (body) == SET
2238 	    && SET_DEST (body) == pc_rtx
2239 	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2240 	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2241 	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2242 	  {
2243 	    /* This function may alter the contents of its argument
2244 	       and clear some of the cc_status.flags bits.
2245 	       It may also return 1 meaning condition now always true
2246 	       or -1 meaning condition now always false
2247 	       or 2 meaning condition nontrivial but altered.  */
2248 	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2249 	    /* If condition now has fixed value, replace the IF_THEN_ELSE
2250 	       with its then-operand or its else-operand.  */
2251 	    if (result == 1)
2252 	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
2253 	    if (result == -1)
2254 	      SET_SRC (body) = XEXP (SET_SRC (body), 2);
2255 
2256 	    /* The jump is now either unconditional or a no-op.
2257 	       If it has become a no-op, don't try to output it.
2258 	       (It would not be recognized.)  */
2259 	    if (SET_SRC (body) == pc_rtx)
2260 	      {
2261 	        delete_insn (insn);
2262 		break;
2263 	      }
2264 	    else if (GET_CODE (SET_SRC (body)) == RETURN)
2265 	      /* Replace (set (pc) (return)) with (return).  */
2266 	      PATTERN (insn) = body = SET_SRC (body);
2267 
2268 	    /* Rerecognize the instruction if it has changed.  */
2269 	    if (result != 0)
2270 	      INSN_CODE (insn) = -1;
2271 	  }
2272 
2273 	/* Make the same adjustments to instructions that examine the
2274 	   condition codes without jumping and instructions that
2275 	   handle conditional moves (if this machine has either one).  */
2276 
2277 	if (cc_status.flags != 0
2278 	    && set != 0)
2279 	  {
2280 	    rtx cond_rtx, then_rtx, else_rtx;
2281 
2282 	    if (!JUMP_P (insn)
2283 		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2284 	      {
2285 		cond_rtx = XEXP (SET_SRC (set), 0);
2286 		then_rtx = XEXP (SET_SRC (set), 1);
2287 		else_rtx = XEXP (SET_SRC (set), 2);
2288 	      }
2289 	    else
2290 	      {
2291 		cond_rtx = SET_SRC (set);
2292 		then_rtx = const_true_rtx;
2293 		else_rtx = const0_rtx;
2294 	      }
2295 
2296 	    switch (GET_CODE (cond_rtx))
2297 	      {
2298 	      case GTU:
2299 	      case GT:
2300 	      case LTU:
2301 	      case LT:
2302 	      case GEU:
2303 	      case GE:
2304 	      case LEU:
2305 	      case LE:
2306 	      case EQ:
2307 	      case NE:
2308 		{
2309 		  int result;
2310 		  if (XEXP (cond_rtx, 0) != cc0_rtx)
2311 		    break;
2312 		  result = alter_cond (cond_rtx);
2313 		  if (result == 1)
2314 		    validate_change (insn, &SET_SRC (set), then_rtx, 0);
2315 		  else if (result == -1)
2316 		    validate_change (insn, &SET_SRC (set), else_rtx, 0);
2317 		  else if (result == 2)
2318 		    INSN_CODE (insn) = -1;
2319 		  if (SET_DEST (set) == SET_SRC (set))
2320 		    delete_insn (insn);
2321 		}
2322 		break;
2323 
2324 	      default:
2325 		break;
2326 	      }
2327 	  }
2328 
2329 #endif
2330 
2331 #ifdef HAVE_peephole
2332 	/* Do machine-specific peephole optimizations if desired.  */
2333 
2334 	if (optimize && !flag_no_peephole && !nopeepholes)
2335 	  {
2336 	    rtx next = peephole (insn);
2337 	    /* When peepholing, if there were notes within the peephole,
2338 	       emit them before the peephole.  */
2339 	    if (next != 0 && next != NEXT_INSN (insn))
2340 	      {
2341 		rtx note, prev = PREV_INSN (insn);
2342 
2343 		for (note = NEXT_INSN (insn); note != next;
2344 		     note = NEXT_INSN (note))
2345 		  final_scan_insn (note, file, optimize, nopeepholes, seen);
2346 
2347 		/* Put the notes in the proper position for a later
2348 		   rescan.  For example, the SH target can do this
2349 		   when generating a far jump in a delayed branch
2350 		   sequence.  */
2351 		note = NEXT_INSN (insn);
2352 		PREV_INSN (note) = prev;
2353 		NEXT_INSN (prev) = note;
2354 		NEXT_INSN (PREV_INSN (next)) = insn;
2355 		PREV_INSN (insn) = PREV_INSN (next);
2356 		NEXT_INSN (insn) = next;
2357 		PREV_INSN (next) = insn;
2358 	      }
2359 
2360 	    /* PEEPHOLE might have changed this.  */
2361 	    body = PATTERN (insn);
2362 	  }
2363 #endif
2364 
2365 	/* Try to recognize the instruction.
2366 	   If successful, verify that the operands satisfy the
2367 	   constraints for the instruction.  Crash if they don't,
2368 	   since `reload' should have changed them so that they do.  */
2369 
2370 	insn_code_number = recog_memoized (insn);
2371 	cleanup_subreg_operands (insn);
2372 
2373 	/* Dump the insn in the assembly for debugging.  */
2374 	if (flag_dump_rtl_in_asm)
2375 	  {
2376 	    print_rtx_head = ASM_COMMENT_START;
2377 	    print_rtl_single (asm_out_file, insn);
2378 	    print_rtx_head = "";
2379 	  }
2380 
2381 	if (! constrain_operands_cached (1))
2382 	  fatal_insn_not_found (insn);
2383 
2384 	/* Some target machines need to prescan each insn before
2385 	   it is output.  */
2386 
2387 #ifdef FINAL_PRESCAN_INSN
2388 	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2389 #endif
2390 
2391 #ifdef HAVE_conditional_execution
2392 	if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2393 	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2394 #endif
2395 
2396 #ifdef HAVE_cc0
2397 	cc_prev_status = cc_status;
2398 
2399 	/* Update `cc_status' for this instruction.
2400 	   The instruction's output routine may change it further.
2401 	   If the output routine for a jump insn needs to depend
2402 	   on the cc status, it should look at cc_prev_status.  */
2403 
2404 	NOTICE_UPDATE_CC (body, insn);
2405 #endif
2406 
2407 	current_output_insn = debug_insn = insn;
2408 
2409 #if defined (DWARF2_UNWIND_INFO)
2410 	if (CALL_P (insn) && dwarf2out_do_frame ())
2411 	  dwarf2out_frame_debug (insn, false);
2412 #endif
2413 
2414 	/* Find the proper template for this insn.  */
2415 	template = get_insn_template (insn_code_number, insn);
2416 
2417 	/* If the C code returns 0, it means that it is a jump insn
2418 	   which follows a deleted test insn, and that test insn
2419 	   needs to be reinserted.  */
2420 	if (template == 0)
2421 	  {
2422 	    rtx prev;
2423 
2424 	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2425 
2426 	    /* We have already processed the notes between the setter and
2427 	       the user.  Make sure we don't process them again; this is
2428 	       particularly important if one of the notes is a block
2429 	       scope note or an EH note.  */
2430 	    for (prev = insn;
2431 		 prev != last_ignored_compare;
2432 		 prev = PREV_INSN (prev))
2433 	      {
2434 		if (NOTE_P (prev))
2435 		  delete_insn (prev);	/* Use delete_note.  */
2436 	      }
2437 
2438 	    return prev;
2439 	  }
2440 
2441 	/* If the template is the string "#", it means that this insn must
2442 	   be split.  */
2443 	if (template[0] == '#' && template[1] == '\0')
2444 	  {
2445 	    rtx new = try_split (body, insn, 0);
2446 
2447 	    /* If we didn't split the insn, go away.  */
2448 	    if (new == insn && PATTERN (new) == body)
2449 	      fatal_insn ("could not split insn", insn);
2450 
2451 #ifdef HAVE_ATTR_length
2452 	    /* This instruction should have been split in shorten_branches,
2453 	       to ensure that we would have valid length info for the
2454 	       resulting split insns.  */
2455 	    gcc_unreachable ();
2456 #endif
2457 
2458 	    return new;
2459 	  }
2460 
2461 #ifdef TARGET_UNWIND_INFO
2462 	/* ??? This will put the directives in the wrong place if
2463 	   get_insn_template outputs assembly directly.  However, calling it
2464 	   before get_insn_template breaks if the insn is split.  */
2465 	targetm.asm_out.unwind_emit (asm_out_file, insn);
2466 #endif
2467 
2468 	/* Output assembler code from the template.  */
2469 	output_asm_insn (template, recog_data.operand);
2470 
2471 	/* If necessary, report the effect that the instruction has on
2472 	   the unwind info.   We've already done this for delay slots
2473 	   and call instructions.  */
2474 #if defined (DWARF2_UNWIND_INFO)
2475 	if (final_sequence == 0
2476 #if !defined (HAVE_prologue)
2477 	    && !ACCUMULATE_OUTGOING_ARGS
2478 #endif
2479 	    && dwarf2out_do_frame ())
2480 	  dwarf2out_frame_debug (insn, true);
2481 #endif
2482 
2483 	current_output_insn = debug_insn = 0;
2484       }
2485     }
2486   return NEXT_INSN (insn);
2487 }
2488 
2489 /* Return whether a source line note needs to be emitted before INSN.  */
2490 
2491 static bool
2492 notice_source_line (rtx insn)
2493 {
2494   const char *filename = insn_file (insn);
2495   int linenum = insn_line (insn);
2496 
2497   if (filename
2498       && (force_source_line
2499 	  || filename != last_filename
2500 	  || last_linenum != linenum))
2501     {
2502       force_source_line = false;
2503       last_filename = filename;
2504       last_linenum = linenum;
2505       high_block_linenum = MAX (last_linenum, high_block_linenum);
2506       high_function_linenum = MAX (last_linenum, high_function_linenum);
2507       return true;
2508     }
2509   return false;
2510 }
2511 
2512 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2513    directly to the desired hard register.  */
2514 
2515 void
2516 cleanup_subreg_operands (rtx insn)
2517 {
2518   int i;
2519   extract_insn_cached (insn);
2520   for (i = 0; i < recog_data.n_operands; i++)
2521     {
2522       /* The following test cannot use recog_data.operand when testing
2523 	 for a SUBREG: the underlying object might have been changed
2524 	 already if we are inside a match_operator expression that
2525 	 matches the else clause.  Instead we test the underlying
2526 	 expression directly.  */
2527       if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2528 	recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2529       else if (GET_CODE (recog_data.operand[i]) == PLUS
2530 	       || GET_CODE (recog_data.operand[i]) == MULT
2531 	       || MEM_P (recog_data.operand[i]))
2532 	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2533     }
2534 
2535   for (i = 0; i < recog_data.n_dups; i++)
2536     {
2537       if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2538 	*recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2539       else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2540 	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
2541 	       || MEM_P (*recog_data.dup_loc[i]))
2542 	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2543     }
2544 }
2545 
2546 /* If X is a SUBREG, replace it with a REG or a MEM,
2547    based on the thing it is a subreg of.  */
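/* Illustrative sketch, not part of the original source (kept under
   "#if 0" so it never reaches the compiler): shrinking a hard register
   through alter_subreg.  Hard register number 0 and a little-endian
   layout are assumed purely for the example.  */
#if 0
static void
example_alter_subreg (void)
{
  /* (subreg:QI (reg:SI 0) 0) names the low byte of hard reg 0;
     alter_subreg rewrites it in place, yielding (reg:QI 0).  */
  rtx x = gen_rtx_SUBREG (QImode, gen_rtx_REG (SImode, 0), 0);

  x = alter_subreg (&x);
}
#endif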
2548 
2549 rtx
2550 alter_subreg (rtx *xp)
2551 {
2552   rtx x = *xp;
2553   rtx y = SUBREG_REG (x);
2554 
2555   /* simplify_subreg does not remove subreg from volatile references.
2556      We are required to.  */
2557   if (MEM_P (y))
2558     {
2559       int offset = SUBREG_BYTE (x);
2560 
2561       /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2562 	 contains 0 instead of the proper offset.  See simplify_subreg.  */
2563       if (offset == 0
2564 	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2565         {
2566           int difference = GET_MODE_SIZE (GET_MODE (y))
2567 			   - GET_MODE_SIZE (GET_MODE (x));
2568           if (WORDS_BIG_ENDIAN)
2569             offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2570           if (BYTES_BIG_ENDIAN)
2571             offset += difference % UNITS_PER_WORD;
2572         }
2573 
2574       *xp = adjust_address (y, GET_MODE (x), offset);
2575     }
2576   else
2577     {
2578       rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2579 				 SUBREG_BYTE (x));
2580 
2581       if (new != 0)
2582 	*xp = new;
2583       else if (REG_P (y))
2584 	{
2585 	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
2586 	  unsigned int regno = subreg_regno (x);
2587 	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2588 	}
2589     }
2590 
2591   return *xp;
2592 }
2593 
2594 /* Do alter_subreg on all the SUBREGs contained in X.  */
2595 
2596 static rtx
2597 walk_alter_subreg (rtx *xp)
2598 {
2599   rtx x = *xp;
2600   switch (GET_CODE (x))
2601     {
2602     case PLUS:
2603     case MULT:
2604     case AND:
2605       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2606       XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2607       break;
2608 
2609     case MEM:
2610     case ZERO_EXTEND:
2611       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2612       break;
2613 
2614     case SUBREG:
2615       return alter_subreg (xp);
2616 
2617     default:
2618       break;
2619     }
2620 
2621   return *xp;
2622 }
2623 
2624 #ifdef HAVE_cc0
2625 
2626 /* Given BODY, the body of a jump instruction, alter the jump condition
2627    as required by the bits that are set in cc_status.flags.
2628    Not all of the bits there can be handled at this level in all cases.
2629 
2630    The value is normally 0.
2631    1 means that the condition has become always true.
2632    -1 means that the condition has become always false.
2633    2 means that COND has been altered.  */
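/* Illustrative sketch, not part of the original source (kept under
   "#if 0"): how a caller acts on the value returned by alter_cond,
   mirroring the jump handling in final_scan_insn above.  SRC is assumed
   to point at an IF_THEN_ELSE controlled by cc0.  */
#if 0
static void
example_use_alter_cond (rtx insn, rtx *src)
{
  int result = alter_cond (XEXP (*src, 0));

  if (result == 1)
    *src = XEXP (*src, 1);	/* Now always true: keep the then-arm.  */
  else if (result == -1)
    *src = XEXP (*src, 2);	/* Now always false: keep the else-arm.  */
  else if (result != 0)
    INSN_CODE (insn) = -1;	/* Condition altered: re-recognize INSN.  */
}
#endif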
2634 
2635 static int
2636 alter_cond (rtx cond)
2637 {
2638   int value = 0;
2639 
2640   if (cc_status.flags & CC_REVERSED)
2641     {
2642       value = 2;
2643       PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2644     }
2645 
2646   if (cc_status.flags & CC_INVERTED)
2647     {
2648       value = 2;
2649       PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2650     }
2651 
2652   if (cc_status.flags & CC_NOT_POSITIVE)
2653     switch (GET_CODE (cond))
2654       {
2655       case LE:
2656       case LEU:
2657       case GEU:
2658 	/* Jump becomes unconditional.  */
2659 	return 1;
2660 
2661       case GT:
2662       case GTU:
2663       case LTU:
2664 	/* Jump becomes no-op.  */
2665 	return -1;
2666 
2667       case GE:
2668 	PUT_CODE (cond, EQ);
2669 	value = 2;
2670 	break;
2671 
2672       case LT:
2673 	PUT_CODE (cond, NE);
2674 	value = 2;
2675 	break;
2676 
2677       default:
2678 	break;
2679       }
2680 
2681   if (cc_status.flags & CC_NOT_NEGATIVE)
2682     switch (GET_CODE (cond))
2683       {
2684       case GE:
2685       case GEU:
2686 	/* Jump becomes unconditional.  */
2687 	return 1;
2688 
2689       case LT:
2690       case LTU:
2691 	/* Jump becomes no-op.  */
2692 	return -1;
2693 
2694       case LE:
2695       case LEU:
2696 	PUT_CODE (cond, EQ);
2697 	value = 2;
2698 	break;
2699 
2700       case GT:
2701       case GTU:
2702 	PUT_CODE (cond, NE);
2703 	value = 2;
2704 	break;
2705 
2706       default:
2707 	break;
2708       }
2709 
2710   if (cc_status.flags & CC_NO_OVERFLOW)
2711     switch (GET_CODE (cond))
2712       {
2713       case GEU:
2714 	/* Jump becomes unconditional.  */
2715 	return 1;
2716 
2717       case LEU:
2718 	PUT_CODE (cond, EQ);
2719 	value = 2;
2720 	break;
2721 
2722       case GTU:
2723 	PUT_CODE (cond, NE);
2724 	value = 2;
2725 	break;
2726 
2727       case LTU:
2728 	/* Jump becomes no-op.  */
2729 	return -1;
2730 
2731       default:
2732 	break;
2733       }
2734 
2735   if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2736     switch (GET_CODE (cond))
2737       {
2738       default:
2739 	gcc_unreachable ();
2740 
2741       case NE:
2742 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2743 	value = 2;
2744 	break;
2745 
2746       case EQ:
2747 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2748 	value = 2;
2749 	break;
2750       }
2751 
2752   if (cc_status.flags & CC_NOT_SIGNED)
2753     /* The flags are valid if signed condition operators are converted
2754        to unsigned.  */
2755     switch (GET_CODE (cond))
2756       {
2757       case LE:
2758 	PUT_CODE (cond, LEU);
2759 	value = 2;
2760 	break;
2761 
2762       case LT:
2763 	PUT_CODE (cond, LTU);
2764 	value = 2;
2765 	break;
2766 
2767       case GT:
2768 	PUT_CODE (cond, GTU);
2769 	value = 2;
2770 	break;
2771 
2772       case GE:
2773 	PUT_CODE (cond, GEU);
2774 	value = 2;
2775 	break;
2776 
2777       default:
2778 	break;
2779       }
2780 
2781   return value;
2782 }
2783 #endif
2784 
2785 /* Report inconsistency between the assembler template and the operands.
2786    In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
2787 
2788 void
2789 output_operand_lossage (const char *cmsgid, ...)
2790 {
2791   char *fmt_string;
2792   char *new_message;
2793   const char *pfx_str;
2794   va_list ap;
2795 
2796   va_start (ap, cmsgid);
2797 
2798   pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
2799   asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
2800   vasprintf (&new_message, fmt_string, ap);
2801 
2802   if (this_is_asm_operands)
2803     error_for_asm (this_is_asm_operands, "%s", new_message);
2804   else
2805     internal_error ("%s", new_message);
2806 
2807   free (fmt_string);
2808   free (new_message);
2809   va_end (ap);
2810 }
2811 
2812 /* Output of assembler code from a template, and its subroutines.  */
2813 
2814 /* Annotate the assembly with a comment describing the pattern and
2815    alternative used.  */
2816 
2817 static void
2818 output_asm_name (void)
2819 {
2820   if (debug_insn)
2821     {
2822       int num = INSN_CODE (debug_insn);
2823       fprintf (asm_out_file, "\t%s %d\t%s",
2824 	       ASM_COMMENT_START, INSN_UID (debug_insn),
2825 	       insn_data[num].name);
2826       if (insn_data[num].n_alternatives > 1)
2827 	fprintf (asm_out_file, "/%d", which_alternative + 1);
2828 #ifdef HAVE_ATTR_length
2829       fprintf (asm_out_file, "\t[length = %d]",
2830 	       get_attr_length (debug_insn));
2831 #endif
2832       /* Clear this so only the first assembler insn
2833 	 of any rtl insn will get the special comment for -dp.  */
2834       debug_insn = 0;
2835     }
2836 }
2837 
2838 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2839    or its address, return that expr.  Set *PADDRESSP to 1 if the expr
2840    corresponds to the address of the object and 0 if to the object.  */
2841 
2842 static tree
2843 get_mem_expr_from_op (rtx op, int *paddressp)
2844 {
2845   tree expr;
2846   int inner_addressp;
2847 
2848   *paddressp = 0;
2849 
2850   if (REG_P (op))
2851     return REG_EXPR (op);
2852   else if (!MEM_P (op))
2853     return 0;
2854 
2855   if (MEM_EXPR (op) != 0)
2856     return MEM_EXPR (op);
2857 
2858   /* Otherwise we have an address, so indicate it and look at the address.  */
2859   *paddressp = 1;
2860   op = XEXP (op, 0);
2861 
2862   /* First check if we have a decl for the address, then look at the right side
2863      if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
2864      But don't allow the address itself to be indirect.  */
2865   if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2866     return expr;
2867   else if (GET_CODE (op) == PLUS
2868 	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2869     return expr;
2870 
2871   while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
2872 	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
2873     op = XEXP (op, 0);
2874 
2875   expr = get_mem_expr_from_op (op, &inner_addressp);
2876   return inner_addressp ? 0 : expr;
2877 }
2878 
2879 /* Output operand names for assembler instructions.  OPERANDS is the
2880    operand vector, OPORDER is the order to write the operands, and NOPS
2881    is the number of operands to write.  */
2882 
2883 static void
2884 output_asm_operand_names (rtx *operands, int *oporder, int nops)
2885 {
2886   int wrote = 0;
2887   int i;
2888 
2889   for (i = 0; i < nops; i++)
2890     {
2891       int addressp;
2892       rtx op = operands[oporder[i]];
2893       tree expr = get_mem_expr_from_op (op, &addressp);
2894 
2895       fprintf (asm_out_file, "%c%s",
2896 	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2897       wrote = 1;
2898       if (expr)
2899 	{
2900 	  fprintf (asm_out_file, "%s",
2901 		   addressp ? "*" : "");
2902 	  print_mem_expr (asm_out_file, expr);
2903 	  wrote = 1;
2904 	}
2905       else if (REG_P (op) && ORIGINAL_REGNO (op)
2906 	       && ORIGINAL_REGNO (op) != REGNO (op))
2907 	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2908     }
2909 }
2910 
2911 /* Output text from TEMPLATE to the assembler output file,
2912    obeying %-directives to substitute operands taken from
2913    the vector OPERANDS.
2914 
2915    %N (for N a digit) means print operand N in usual manner.
2916    %lN means require operand N to be a CODE_LABEL or LABEL_REF
2917       and print the label name with no punctuation.
2918    %cN means require operand N to be a constant
2919       and print the constant expression with no punctuation.
2920    %aN means expect operand N to be a memory address
2921       (not a memory reference!) and print a reference
2922       to that address.
2923    %nN means expect operand N to be a constant
2924       and print a constant expression for minus the value
2925       of the operand, with no other punctuation.  */
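/* Illustrative sketch, not part of the original source (kept under
   "#if 0"): feeding a hand-written template through output_asm_insn.
   The "addi" mnemonic is made up, and how the register and constant
   are printed depends on the target's PRINT_OPERAND.  */
#if 0
static void
example_output_asm_insn (void)
{
  rtx ops[2];

  ops[0] = gen_rtx_REG (SImode, 0);
  ops[1] = GEN_INT (42);

  /* "%0" and "%1" are replaced by the printed operands;
     "%%" would emit a literal '%'.  */
  output_asm_insn ("addi %0,%1", ops);
}
#endif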
2926 
2927 void
2928 output_asm_insn (const char *template, rtx *operands)
2929 {
2930   const char *p;
2931   int c;
2932 #ifdef ASSEMBLER_DIALECT
2933   int dialect = 0;
2934 #endif
2935   int oporder[MAX_RECOG_OPERANDS];
2936   char opoutput[MAX_RECOG_OPERANDS];
2937   int ops = 0;
2938 
2939   /* An insn may return a null string template
2940      in a case where no assembler code is needed.  */
2941   if (*template == 0)
2942     return;
2943 
2944   memset (opoutput, 0, sizeof opoutput);
2945   p = template;
2946   putc ('\t', asm_out_file);
2947 
2948 #ifdef ASM_OUTPUT_OPCODE
2949   ASM_OUTPUT_OPCODE (asm_out_file, p);
2950 #endif
2951 
2952   while ((c = *p++))
2953     switch (c)
2954       {
2955       case '\n':
2956 	if (flag_verbose_asm)
2957 	  output_asm_operand_names (operands, oporder, ops);
2958 	if (flag_print_asm_name)
2959 	  output_asm_name ();
2960 
2961 	ops = 0;
2962 	memset (opoutput, 0, sizeof opoutput);
2963 
2964 	putc (c, asm_out_file);
2965 #ifdef ASM_OUTPUT_OPCODE
2966 	while ((c = *p) == '\t')
2967 	  {
2968 	    putc (c, asm_out_file);
2969 	    p++;
2970 	  }
2971 	ASM_OUTPUT_OPCODE (asm_out_file, p);
2972 #endif
2973 	break;
2974 
2975 #ifdef ASSEMBLER_DIALECT
2976       case '{':
2977 	{
2978 	  int i;
2979 
2980 	  if (dialect)
2981 	    output_operand_lossage ("nested assembly dialect alternatives");
2982 	  else
2983 	    dialect = 1;
2984 
2985 	  /* If we want the first dialect, do nothing.  Otherwise, skip
2986 	     DIALECT_NUMBER strings, each ending with '|'.  */
2987 	  for (i = 0; i < dialect_number; i++)
2988 	    {
2989 	      while (*p && *p != '}' && *p++ != '|')
2990 		;
2991 	      if (*p == '}')
2992 		break;
2993 	      if (*p == '|')
2994 		p++;
2995 	    }
2996 
2997 	  if (*p == '\0')
2998 	    output_operand_lossage ("unterminated assembly dialect alternative");
2999 	}
3000 	break;
3001 
3002       case '|':
3003 	if (dialect)
3004 	  {
3005 	    /* Skip to close brace.  */
3006 	    do
3007 	      {
3008 		if (*p == '\0')
3009 		  {
3010 		    output_operand_lossage ("unterminated assembly dialect alternative");
3011 		    break;
3012 		  }
3013 	      }
3014 	    while (*p++ != '}');
3015 	    dialect = 0;
3016 	  }
3017 	else
3018 	  putc (c, asm_out_file);
3019 	break;
3020 
3021       case '}':
3022 	if (! dialect)
3023 	  putc (c, asm_out_file);
3024 	dialect = 0;
3025 	break;
3026 #endif
3027 
3028       case '%':
3029 	/* %% outputs a single %.  */
3030 	if (*p == '%')
3031 	  {
3032 	    p++;
3033 	    putc (c, asm_out_file);
3034 	  }
3035 	/* %= outputs a number which is unique to each insn in the entire
3036 	   compilation.  This is useful for making local labels that are
3037 	   referred to more than once in a given insn.  */
3038 	else if (*p == '=')
3039 	  {
3040 	    p++;
3041 	    fprintf (asm_out_file, "%d", insn_counter);
3042 	  }
3043 	/* % followed by a letter and some digits
3044 	   outputs an operand in a special way depending on the letter.
3045 	   Letters `acln' are implemented directly.
3046 	   Other letters are passed to `output_operand' so that
3047 	   the PRINT_OPERAND macro can define them.  */
3048 	else if (ISALPHA (*p))
3049 	  {
3050 	    int letter = *p++;
3051 	    unsigned long opnum;
3052 	    char *endptr;
3053 
3054 	    opnum = strtoul (p, &endptr, 10);
3055 
3056 	    if (endptr == p)
3057 	      output_operand_lossage ("operand number missing "
3058 				      "after %%-letter");
3059 	    else if (this_is_asm_operands && opnum >= insn_noperands)
3060 	      output_operand_lossage ("operand number out of range");
3061 	    else if (letter == 'l')
3062 	      output_asm_label (operands[opnum]);
3063 	    else if (letter == 'a')
3064 	      output_address (operands[opnum]);
3065 	    else if (letter == 'c')
3066 	      {
3067 		if (CONSTANT_ADDRESS_P (operands[opnum]))
3068 		  output_addr_const (asm_out_file, operands[opnum]);
3069 		else
3070 		  output_operand (operands[opnum], 'c');
3071 	      }
3072 	    else if (letter == 'n')
3073 	      {
3074 		if (GET_CODE (operands[opnum]) == CONST_INT)
3075 		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3076 			   - INTVAL (operands[opnum]));
3077 		else
3078 		  {
3079 		    putc ('-', asm_out_file);
3080 		    output_addr_const (asm_out_file, operands[opnum]);
3081 		  }
3082 	      }
3083 	    else
3084 	      output_operand (operands[opnum], letter);
3085 
3086 	    if (!opoutput[opnum])
3087 	      oporder[ops++] = opnum;
3088 	    opoutput[opnum] = 1;
3089 
3090 	    p = endptr;
3091 	    c = *p;
3092 	  }
3093 	/* % followed by a digit outputs an operand the default way.  */
3094 	else if (ISDIGIT (*p))
3095 	  {
3096 	    unsigned long opnum;
3097 	    char *endptr;
3098 
3099 	    opnum = strtoul (p, &endptr, 10);
3100 	    if (this_is_asm_operands && opnum >= insn_noperands)
3101 	      output_operand_lossage ("operand number out of range");
3102 	    else
3103 	      output_operand (operands[opnum], 0);
3104 
3105 	    if (!opoutput[opnum])
3106 	      oporder[ops++] = opnum;
3107 	    opoutput[opnum] = 1;
3108 
3109 	    p = endptr;
3110 	    c = *p;
3111 	  }
3112 	/* % followed by punctuation: output something for that
3113 	   punctuation character alone, with no operand.
3114 	   The PRINT_OPERAND macro decides what is actually done.  */
3115 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3116 	else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3117 	  output_operand (NULL_RTX, *p++);
3118 #endif
3119 	else
3120 	  output_operand_lossage ("invalid %%-code");
3121 	break;
3122 
3123       default:
3124 	putc (c, asm_out_file);
3125       }
3126 
3127   /* Write out the variable names for operands, if we know them.  */
3128   if (flag_verbose_asm)
3129     output_asm_operand_names (operands, oporder, ops);
3130   if (flag_print_asm_name)
3131     output_asm_name ();
3132 
3133   putc ('\n', asm_out_file);
3134 }
3135 
3136 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
3137 
3138 void
3139 output_asm_label (rtx x)
3140 {
3141   char buf[256];
3142 
3143   if (GET_CODE (x) == LABEL_REF)
3144     x = XEXP (x, 0);
3145   if (LABEL_P (x)
3146       || (NOTE_P (x)
3147 	  && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3148     ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3149   else
3150     output_operand_lossage ("'%%l' operand isn't a label");
3151 
3152   assemble_name (asm_out_file, buf);
3153 }
3154 
3155 /* Print operand X using machine-dependent assembler syntax.
3156    The macro PRINT_OPERAND is defined just to control this function.
3157    CODE is a non-digit that preceded the operand-number in the % spec,
3158    such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
3159    between the % and the digits.
3160    When CODE is a non-letter, X is 0.
3161 
3162    The meanings of the letters are machine-dependent and controlled
3163    by PRINT_OPERAND.  */
3164 
3165 static void
3166 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3167 {
3168   if (x && GET_CODE (x) == SUBREG)
3169     x = alter_subreg (&x);
3170 
3171   /* X must not be a pseudo reg.  */
3172   gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3173 
3174   PRINT_OPERAND (asm_out_file, x, code);
3175 }
3176 
3177 /* Print a memory reference operand for address X
3178    using machine-dependent assembler syntax.
3179    The macro PRINT_OPERAND_ADDRESS exists just to control this function.  */
3180 
3181 void
3182 output_address (rtx x)
3183 {
3184   walk_alter_subreg (&x);
3185   PRINT_OPERAND_ADDRESS (asm_out_file, x);
3186 }
3187 
3188 /* Print an integer constant expression in assembler syntax.
3189    Addition and subtraction are the only arithmetic
3190    that may appear in these expressions.  */
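/* Illustrative sketch, not part of the original source (kept under
   "#if 0"): printing the address constant x+4.  The symbol name "x"
   is hypothetical.  */
#if 0
static void
example_output_addr_const (FILE *file)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "x");
  rtx addr = gen_rtx_CONST (Pmode, gen_rtx_PLUS (Pmode, sym, GEN_INT (4)));

  output_addr_const (file, addr);	/* Prints "x+4".  */
}
#endif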
3191 
3192 void
3193 output_addr_const (FILE *file, rtx x)
3194 {
3195   char buf[256];
3196 
3197  restart:
3198   switch (GET_CODE (x))
3199     {
3200     case PC:
3201       putc ('.', file);
3202       break;
3203 
3204     case SYMBOL_REF:
3205       if (SYMBOL_REF_DECL (x))
3206 	mark_decl_referenced (SYMBOL_REF_DECL (x));
3207 #ifdef ASM_OUTPUT_SYMBOL_REF
3208       ASM_OUTPUT_SYMBOL_REF (file, x);
3209 #else
3210       assemble_name (file, XSTR (x, 0));
3211 #endif
3212       break;
3213 
3214     case LABEL_REF:
3215       x = XEXP (x, 0);
3216       /* Fall through.  */
3217     case CODE_LABEL:
3218       ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3219 #ifdef ASM_OUTPUT_LABEL_REF
3220       ASM_OUTPUT_LABEL_REF (file, buf);
3221 #else
3222       assemble_name (file, buf);
3223 #endif
3224       break;
3225 
3226     case CONST_INT:
3227       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3228       break;
3229 
3230     case CONST:
3231       /* This used to output parentheses around the expression,
3232 	 but that does not work on the 386 (either ATT or BSD assembler).  */
3233       output_addr_const (file, XEXP (x, 0));
3234       break;
3235 
3236     case CONST_DOUBLE:
3237       if (GET_MODE (x) == VOIDmode)
3238 	{
3239 	  /* We can use %d if the number is one word and positive.  */
3240 	  if (CONST_DOUBLE_HIGH (x))
3241 	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3242 		     CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3243 	  else if (CONST_DOUBLE_LOW (x) < 0)
3244 	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3245 	  else
3246 	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3247 	}
3248       else
3249 	/* We can't handle floating point constants;
3250 	   PRINT_OPERAND must handle them.  */
3251 	output_operand_lossage ("floating constant misused");
3252       break;
3253 
3254     case PLUS:
3255       /* Some assemblers need integer constants to appear last (e.g. MASM).  */
3256       if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3257 	{
3258 	  output_addr_const (file, XEXP (x, 1));
3259 	  if (INTVAL (XEXP (x, 0)) >= 0)
3260 	    fprintf (file, "+");
3261 	  output_addr_const (file, XEXP (x, 0));
3262 	}
3263       else
3264 	{
3265 	  output_addr_const (file, XEXP (x, 0));
3266 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
3267 	      || INTVAL (XEXP (x, 1)) >= 0)
3268 	    fprintf (file, "+");
3269 	  output_addr_const (file, XEXP (x, 1));
3270 	}
3271       break;
3272 
3273     case MINUS:
3274       /* Avoid outputting things like x-x or x+5-x,
3275 	 since some assemblers can't handle that.  */
3276       x = simplify_subtraction (x);
3277       if (GET_CODE (x) != MINUS)
3278 	goto restart;
3279 
3280       output_addr_const (file, XEXP (x, 0));
3281       fprintf (file, "-");
3282       if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3283 	  || GET_CODE (XEXP (x, 1)) == PC
3284 	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3285 	output_addr_const (file, XEXP (x, 1));
3286       else
3287 	{
3288 	  fputs (targetm.asm_out.open_paren, file);
3289 	  output_addr_const (file, XEXP (x, 1));
3290 	  fputs (targetm.asm_out.close_paren, file);
3291 	}
3292       break;
3293 
3294     case ZERO_EXTEND:
3295     case SIGN_EXTEND:
3296     case SUBREG:
3297       output_addr_const (file, XEXP (x, 0));
3298       break;
3299 
3300     default:
3301 #ifdef OUTPUT_ADDR_CONST_EXTRA
3302       OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3303       break;
3304 
3305     fail:
3306 #endif
3307       output_operand_lossage ("invalid expression as operand");
3308     }
3309 }
3310 
3311 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3312    %R prints the value of REGISTER_PREFIX.
3313    %L prints the value of LOCAL_LABEL_PREFIX.
3314    %U prints the value of USER_LABEL_PREFIX.
3315    %I prints the value of IMMEDIATE_PREFIX.
3316    %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3317    Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3318 
3319    We handle alternate assembler dialects here, just like output_asm_insn.  */
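/* Illustrative sketch, not part of the original source (kept under
   "#if 0"): the mnemonics, register name and label number are made up.
   On a target whose REGISTER_PREFIX is "%" and LOCAL_LABEL_PREFIX is
   ".", this prints "jsr %a0" and "bra .L42", each tab-indented.  */
#if 0
static void
example_asm_fprintf (void)
{
  asm_fprintf (asm_out_file, "\tjsr\t%Ra0\n");
  asm_fprintf (asm_out_file, "\tbra\t%LL%d\n", 42);
}
#endif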
3320 
3321 void
3322 asm_fprintf (FILE *file, const char *p, ...)
3323 {
3324   char buf[10];
3325   char *q, c;
3326   va_list argptr;
3327 
3328   va_start (argptr, p);
3329 
3330   buf[0] = '%';
3331 
3332   while ((c = *p++))
3333     switch (c)
3334       {
3335 #ifdef ASSEMBLER_DIALECT
3336       case '{':
3337 	{
3338 	  int i;
3339 
3340 	  /* If we want the first dialect, do nothing.  Otherwise, skip
3341 	     DIALECT_NUMBER strings, each ending with '|'.  */
3342 	  for (i = 0; i < dialect_number; i++)
3343 	    {
3344 	      while (*p && *p++ != '|')
3345 		;
3346 
3347 	      if (*p == '|')
3348 		p++;
3349 	    }
3350 	}
3351 	break;
3352 
3353       case '|':
3354 	/* Skip to close brace.  */
3355 	while (*p && *p++ != '}')
3356 	  ;
3357 	break;
3358 
3359       case '}':
3360 	break;
3361 #endif
3362 
3363       case '%':
3364 	c = *p++;
3365 	q = &buf[1];
3366 	while (strchr ("-+ #0", c))
3367 	  {
3368 	    *q++ = c;
3369 	    c = *p++;
3370 	  }
3371 	while (ISDIGIT (c) || c == '.')
3372 	  {
3373 	    *q++ = c;
3374 	    c = *p++;
3375 	  }
3376 	switch (c)
3377 	  {
3378 	  case '%':
3379 	    putc ('%', file);
3380 	    break;
3381 
3382 	  case 'd':  case 'i':  case 'u':
3383 	  case 'x':  case 'X':  case 'o':
3384 	  case 'c':
3385 	    *q++ = c;
3386 	    *q = 0;
3387 	    fprintf (file, buf, va_arg (argptr, int));
3388 	    break;
3389 
3390 	  case 'w':
3391 	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3392 	       'o' cases, but we do not check for those cases.  It
3393 	       means that the value is a HOST_WIDE_INT, which may be
3394 	       either `long' or `long long'.  */
3395 	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3396 	    q += strlen (HOST_WIDE_INT_PRINT);
3397 	    *q++ = *p++;
3398 	    *q = 0;
3399 	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3400 	    break;
3401 
3402 	  case 'l':
3403 	    *q++ = c;
3404 #ifdef HAVE_LONG_LONG
3405 	    if (*p == 'l')
3406 	      {
3407 		*q++ = *p++;
3408 		*q++ = *p++;
3409 		*q = 0;
3410 		fprintf (file, buf, va_arg (argptr, long long));
3411 	      }
3412 	    else
3413 #endif
3414 	      {
3415 		*q++ = *p++;
3416 		*q = 0;
3417 		fprintf (file, buf, va_arg (argptr, long));
3418 	      }
3419 
3420 	    break;
3421 
3422 	  case 's':
3423 	    *q++ = c;
3424 	    *q = 0;
3425 	    fprintf (file, buf, va_arg (argptr, char *));
3426 	    break;
3427 
3428 	  case 'O':
3429 #ifdef ASM_OUTPUT_OPCODE
3430 	    ASM_OUTPUT_OPCODE (asm_out_file, p);
3431 #endif
3432 	    break;
3433 
3434 	  case 'R':
3435 #ifdef REGISTER_PREFIX
3436 	    fprintf (file, "%s", REGISTER_PREFIX);
3437 #endif
3438 	    break;
3439 
3440 	  case 'I':
3441 #ifdef IMMEDIATE_PREFIX
3442 	    fprintf (file, "%s", IMMEDIATE_PREFIX);
3443 #endif
3444 	    break;
3445 
3446 	  case 'L':
3447 #ifdef LOCAL_LABEL_PREFIX
3448 	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3449 #endif
3450 	    break;
3451 
3452 	  case 'U':
3453 	    fputs (user_label_prefix, file);
3454 	    break;
3455 
3456 #ifdef ASM_FPRINTF_EXTENSIONS
3457 	    /* Uppercase letters are reserved for general use by asm_fprintf
3458 	       and so are not available to target-specific code.  To prevent
3459 	       the ASM_FPRINTF_EXTENSIONS macro from using them, they are
3460 	       listed here.  As they get turned into real extensions
3461 	       to asm_fprintf they should be removed from this list.  */
3462 	  case 'A': case 'B': case 'C': case 'D': case 'E':
3463 	  case 'F': case 'G': case 'H': case 'J': case 'K':
3464 	  case 'M': case 'N': case 'P': case 'Q': case 'S':
3465 	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
3466 	    break;
3467 
3468 	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3469 #endif
3470 	  default:
3471 	    gcc_unreachable ();
3472 	  }
3473 	break;
3474 
3475       default:
3476 	putc (c, file);
3477       }
3478   va_end (argptr);
3479 }
3480 
3481 /* Split up a CONST_DOUBLE or integer constant rtx
3482    into two rtx's for single words,
3483    storing in *FIRST the word that comes first in memory in the target
3484    and in *SECOND the other.  */
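/* Illustrative sketch, not part of the original source (kept under
   "#if 0"): splitting a double-word constant.  The variable names
   follow the comment above; *FIRST is the word that comes first in
   target memory.  */
#if 0
static void
example_split_double (void)
{
  rtx first_word, second_word;

  /* For (const_int -1) both halves come back as (const_int -1),
     whatever the target's word order.  */
  split_double (constm1_rtx, &first_word, &second_word);
}
#endif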
3485 
3486 void
3487 split_double (rtx value, rtx *first, rtx *second)
3488 {
3489   if (GET_CODE (value) == CONST_INT)
3490     {
3491       if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3492 	{
3493 	  /* In this case the CONST_INT holds both target words.
3494 	     Extract the bits from it into two word-sized pieces.
3495 	     Sign extend each half to HOST_WIDE_INT.  */
3496 	  unsigned HOST_WIDE_INT low, high;
3497 	  unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3498 
3499 	  /* Set sign_bit to the most significant bit of a word.  */
3500 	  sign_bit = 1;
3501 	  sign_bit <<= BITS_PER_WORD - 1;
3502 
3503 	  /* Set mask so that all bits of the word are set.  We could
3504 	     have used 1 << BITS_PER_WORD instead of basing the
3505 	     calculation on sign_bit.  However, on machines where
3506 	     HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3507 	     compiler warning, even though the code would never be
3508 	     executed.  */
3509 	  mask = sign_bit << 1;
3510 	  mask--;
3511 
3512 	  /* Set sign_extend as any remaining bits.  */
3513 	  sign_extend = ~mask;
3514 
3515 	  /* Pick the lower word and sign-extend it.  */
3516 	  low = INTVAL (value);
3517 	  low &= mask;
3518 	  if (low & sign_bit)
3519 	    low |= sign_extend;
3520 
3521 	  /* Pick the higher word, shifted to the least significant
3522 	     bits, and sign-extend it.  */
3523 	  high = INTVAL (value);
3524 	  high >>= BITS_PER_WORD - 1;
3525 	  high >>= 1;
3526 	  high &= mask;
3527 	  if (high & sign_bit)
3528 	    high |= sign_extend;
3529 
3530 	  /* Store the words in the target machine order.  */
3531 	  if (WORDS_BIG_ENDIAN)
3532 	    {
3533 	      *first = GEN_INT (high);
3534 	      *second = GEN_INT (low);
3535 	    }
3536 	  else
3537 	    {
3538 	      *first = GEN_INT (low);
3539 	      *second = GEN_INT (high);
3540 	    }
3541 	}
3542       else
3543 	{
3544 	  /* The rule for using CONST_INT for a wider mode
3545 	     is that we regard the value as signed.
3546 	     So sign-extend it.  */
3547 	  rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3548 	  if (WORDS_BIG_ENDIAN)
3549 	    {
3550 	      *first = high;
3551 	      *second = value;
3552 	    }
3553 	  else
3554 	    {
3555 	      *first = value;
3556 	      *second = high;
3557 	    }
3558 	}
3559     }
3560   else if (GET_CODE (value) != CONST_DOUBLE)
3561     {
3562       if (WORDS_BIG_ENDIAN)
3563 	{
3564 	  *first = const0_rtx;
3565 	  *second = value;
3566 	}
3567       else
3568 	{
3569 	  *first = value;
3570 	  *second = const0_rtx;
3571 	}
3572     }
3573   else if (GET_MODE (value) == VOIDmode
3574 	   /* This is the old way we did CONST_DOUBLE integers.  */
3575 	   || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3576     {
3577       /* In an integer, the words are defined as most and least significant.
3578 	 So order them by the target's convention.  */
3579       if (WORDS_BIG_ENDIAN)
3580 	{
3581 	  *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3582 	  *second = GEN_INT (CONST_DOUBLE_LOW (value));
3583 	}
3584       else
3585 	{
3586 	  *first = GEN_INT (CONST_DOUBLE_LOW (value));
3587 	  *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3588 	}
3589     }
3590   else
3591     {
3592       REAL_VALUE_TYPE r;
3593       long l[2];
3594       REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3595 
3596       /* Note, this converts the REAL_VALUE_TYPE to the target's
3597 	 format, splits up the floating point double and outputs
3598 	 exactly 32 bits of it into each of l[0] and l[1] --
3599 	 not necessarily BITS_PER_WORD bits.  */
3600       REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3601 
3602       /* If 32 bits is an entire word for the target, but not for the host,
3603 	 then sign-extend on the host so that the number will look the same
3604 	 way on the host that it would on the target.  See for instance
3605 	 simplify_unary_operation.  The #if is needed to avoid compiler
3606 	 warnings.  */
3607 
3608 #if HOST_BITS_PER_LONG > 32
3609       if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3610 	{
3611 	  if (l[0] & ((long) 1 << 31))
3612 	    l[0] |= ((long) (-1) << 32);
3613 	  if (l[1] & ((long) 1 << 31))
3614 	    l[1] |= ((long) (-1) << 32);
3615 	}
3616 #endif
3617 
3618       *first = GEN_INT (l[0]);
3619       *second = GEN_INT (l[1]);
3620     }
3621 }
3622 
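/* Illustrative sketch (not part of the original source): the CONST_INT case
   above, specialized to a 64-bit host and 32-bit target words.  For example,
   the value 0xfffffffe00000003 splits into a low word of 3 and a high word
   of -2, each sign-extended back to host width.  The function name is
   hypothetical.  */

static void
split_double_demo (long long value, long long *low, long long *high)
{
  unsigned long long sign_bit = 1ULL << 31;		/* MSB of a 32-bit word.  */
  unsigned long long mask = (sign_bit << 1) - 1;	/* All 32 low bits set.  */
  unsigned long long lo = (unsigned long long) value & mask;
  unsigned long long hi = ((unsigned long long) value >> 32) & mask;

  /* Sign-extend each 32-bit half to the full 64-bit host width.  */
  *low = (long long) ((lo & sign_bit) ? (lo | ~mask) : lo);
  *high = (long long) ((hi & sign_bit) ? (hi | ~mask) : hi);
}
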
3623 /* Return nonzero if this function has no function calls.  */
3624 
3625 int
3626 leaf_function_p (void)
3627 {
3628   rtx insn;
3629   rtx link;
3630 
3631   if (current_function_profile || profile_arc_flag)
3632     return 0;
3633 
3634   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3635     {
3636       if (CALL_P (insn)
3637 	  && ! SIBLING_CALL_P (insn))
3638 	return 0;
3639       if (NONJUMP_INSN_P (insn)
3640 	  && GET_CODE (PATTERN (insn)) == SEQUENCE
3641 	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3642 	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3643 	return 0;
3644     }
3645   for (link = current_function_epilogue_delay_list;
3646        link;
3647        link = XEXP (link, 1))
3648     {
3649       insn = XEXP (link, 0);
3650 
3651       if (CALL_P (insn)
3652 	  && ! SIBLING_CALL_P (insn))
3653 	return 0;
3654       if (NONJUMP_INSN_P (insn)
3655 	  && GET_CODE (PATTERN (insn)) == SEQUENCE
3656 	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3657 	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3658 	return 0;
3659     }
3660 
3661   return 1;
3662 }
3663 
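/* Illustrative sketch (not part of the original source): leaf detection is a
   linear scan that fails as soon as a real (non-sibling) call is found,
   either in the insn stream or in the epilogue delay list.  The structure
   and function below are hypothetical and keep only the fields the scan
   needs.  */

struct demo_insn
{
  struct demo_insn *next;
  int is_call;			/* Nonzero for a call insn.  */
  int is_sibling_call;		/* Nonzero if the call is a sibling (tail) call.  */
};

static int
demo_leaf_function_p (const struct demo_insn *insns)
{
  const struct demo_insn *insn;

  for (insn = insns; insn; insn = insn->next)
    if (insn->is_call && ! insn->is_sibling_call)
      return 0;

  return 1;
}
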
3664 /* Return 1 if branch is a forward branch.
3665    Uses insn_shuid array, so it works only in the final pass.  May be used by
3666    output templates to add customary branch prediction hints.
3667  */
3668 int
3669 final_forward_branch_p (rtx insn)
3670 {
3671   int insn_id, label_id;
3672 
3673   gcc_assert (uid_shuid);
3674   insn_id = INSN_SHUID (insn);
3675   label_id = INSN_SHUID (JUMP_LABEL (insn));
3676   /* We've hit some insns that do not have id information available.  */
3677   gcc_assert (insn_id && label_id);
3678   return insn_id < label_id;
3679 }
3680 
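/* Illustrative sketch (not part of the original source): a target's output
   template might consult the predicate above to choose a static branch
   prediction hint, e.g. predicting backward branches (typically loops) as
   taken and forward branches as not taken.  The helper name and the ",pt" /
   ",pn" suffixes are hypothetical.  */

static const char *
demo_branch_hint (rtx insn)
{
  return final_forward_branch_p (insn) ? ",pn" : ",pt";
}
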
3681 /* On some machines, a function with no call insns
3682    can run faster if it doesn't create its own register window.
3683    When output, the leaf function should use only the "output"
3684    registers.  Ordinarily, the function would be compiled to use
3685    the "input" registers to find its arguments; it is a candidate
3686    for leaf treatment if it uses only the "input" registers.
3687    Leaf function treatment means renumbering so the function
3688    uses the "output" registers instead.  */
3689 
3690 #ifdef LEAF_REGISTERS
3691 
3692 /* Return 1 if this function uses only the registers that can be
3693    safely renumbered.  */
3694 
3695 int
3696 only_leaf_regs_used (void)
3697 {
3698   int i;
3699   const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3700 
3701   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3702     if ((regs_ever_live[i] || global_regs[i])
3703 	&& ! permitted_reg_in_leaf_functions[i])
3704       return 0;
3705 
3706   if (current_function_uses_pic_offset_table
3707       && pic_offset_table_rtx != 0
3708       && REG_P (pic_offset_table_rtx)
3709       && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3710     return 0;
3711 
3712   return 1;
3713 }
3714 
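/* Illustrative sketch (not part of the original source): LEAF_REGISTERS is
   simply a table indexed by hard register number, where a zero entry means
   the register is not usable in a leaf function.  The standalone check below
   (hypothetical names) shows the core test: every live or global register
   must be permitted.  */

static int
demo_only_leaf_regs_used (const char *permitted, const char *live, int nregs)
{
  int i;

  for (i = 0; i < nregs; i++)
    if (live[i] && ! permitted[i])
      return 0;

  return 1;
}
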
3715 /* Scan all instructions and renumber all registers into those
3716    available in leaf functions.  */
3717 
3718 static void
3719 leaf_renumber_regs (rtx first)
3720 {
3721   rtx insn;
3722 
3723   /* Renumber only the actual patterns.
3724      The reg-notes can contain frame pointer refs,
3725      and renumbering them could crash; it should not be needed anyway.  */
3726   for (insn = first; insn; insn = NEXT_INSN (insn))
3727     if (INSN_P (insn))
3728       leaf_renumber_regs_insn (PATTERN (insn));
3729   for (insn = current_function_epilogue_delay_list;
3730        insn;
3731        insn = XEXP (insn, 1))
3732     if (INSN_P (XEXP (insn, 0)))
3733       leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3734 }
3735 
3736 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3737    available in leaf functions.  */
3738 
3739 void
3740 leaf_renumber_regs_insn (rtx in_rtx)
3741 {
3742   int i, j;
3743   const char *format_ptr;
3744 
3745   if (in_rtx == 0)
3746     return;
3747 
3748   /* Renumber all input-registers into output-registers.
3749      Hard registers are remapped via LEAF_REG_REMAP; the rtx "used" flag
3750      marks registers that have already been renumbered.  */
3751 
3752   if (REG_P (in_rtx))
3753     {
3754       int newreg;
3755 
3756       /* Don't renumber the same reg twice.  */
3757       if (in_rtx->used)
3758 	return;
3759 
3760       newreg = REGNO (in_rtx);
3761       /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
3762 	 to reach here as part of a REG_NOTE.  */
3763       if (newreg >= FIRST_PSEUDO_REGISTER)
3764 	{
3765 	  in_rtx->used = 1;
3766 	  return;
3767 	}
3768       newreg = LEAF_REG_REMAP (newreg);
3769       gcc_assert (newreg >= 0);
3770       regs_ever_live[REGNO (in_rtx)] = 0;
3771       regs_ever_live[newreg] = 1;
3772       REGNO (in_rtx) = newreg;
3773       in_rtx->used = 1;
3774     }
3775 
3776   if (INSN_P (in_rtx))
3777     {
3778       /* Inside a SEQUENCE, we find insns.
3779 	 Renumber just the patterns of these insns,
3780 	 just as we do for the top-level insns.  */
3781       leaf_renumber_regs_insn (PATTERN (in_rtx));
3782       return;
3783     }
3784 
3785   format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3786 
3787   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3788     switch (*format_ptr++)
3789       {
3790       case 'e':
3791 	leaf_renumber_regs_insn (XEXP (in_rtx, i));
3792 	break;
3793 
3794       case 'E':
3795 	if (NULL != XVEC (in_rtx, i))
3796 	  {
3797 	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
3798 	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3799 	  }
3800 	break;
3801 
3802       case 'S':
3803       case 's':
3804       case '0':
3805       case 'i':
3806       case 'w':
3807       case 'n':
3808       case 'u':
3809 	break;
3810 
3811       default:
3812 	gcc_unreachable ();
3813       }
3814 }
3815 #endif
3816 
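/* Illustrative sketch (not part of the original source): the per-register
   step in leaf_renumber_regs_insn marks the old hard register dead, the
   remapped register live, and sets a "visited" flag so a register rtx shared
   between expressions is not renumbered twice.  The types and names below
   are hypothetical stand-ins for the rtx fields.  */

struct demo_reg
{
  int regno;		/* Hard register number.  */
  int used;		/* Nonzero once this reg has been remapped.  */
};

static void
demo_leaf_remap (struct demo_reg *reg, const int *remap, char *ever_live)
{
  int newreg;

  if (reg->used)
    return;

  newreg = remap[reg->regno];
  ever_live[reg->regno] = 0;	/* Old register no longer referenced.  */
  ever_live[newreg] = 1;	/* Remapped register is now live.  */
  reg->regno = newreg;
  reg->used = 1;
}
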
3817 
3818 /* When -gused is used, emit debug info for only used symbols. But in
3819    addition to the standard intercepted debug_hooks there are some direct
3820    calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3821    Those routines may also be called from a higher-level intercepted routine.  So,
3822    to prevent recording data for an inner call made on behalf of an intercept,
3823    we maintain an intercept nesting counter (debug_nesting). We only save the
3824    intercepted arguments if the nesting is 1.  */
3825 int debug_nesting = 0;
3826 
3827 static tree *symbol_queue;
3828 int symbol_queue_index = 0;
3829 static int symbol_queue_size = 0;
3830 
3831 /* Generate the symbols for any queued up type symbols we encountered
3832    while generating the type info for some originally used symbol.
3833    This might generate additional entries in the queue.  Only when
3834    the nesting depth goes to 0 is this routine called.  */
3835 
3836 void
3837 debug_flush_symbol_queue (void)
3838 {
3839   int i;
3840 
3841   /* Make sure that additionally queued items are not flushed
3842      prematurely.  */
3843 
3844   ++debug_nesting;
3845 
3846   for (i = 0; i < symbol_queue_index; ++i)
3847     {
3848       /* If we pushed queued symbols then such symbols must be
3849          output no matter what anyone else says.  Specifically,
3850          we need to make sure dbxout_symbol() thinks the symbol was
3851          used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3852          which may be set for outside reasons.  */
3853       int saved_tree_used = TREE_USED (symbol_queue[i]);
3854       int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3855       TREE_USED (symbol_queue[i]) = 1;
3856       TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3857 
3858 #ifdef DBX_DEBUGGING_INFO
3859       dbxout_symbol (symbol_queue[i], 0);
3860 #endif
3861 
3862       TREE_USED (symbol_queue[i]) = saved_tree_used;
3863       TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3864     }
3865 
3866   symbol_queue_index = 0;
3867   --debug_nesting;
3868 }
3869 
3870 /* Queue a type symbol needed as part of the definition of a decl
3871    symbol.  These symbols are generated when debug_flush_symbol_queue()
3872    is called.  */
3873 
3874 void
3875 debug_queue_symbol (tree decl)
3876 {
3877   if (symbol_queue_index >= symbol_queue_size)
3878     {
3879       symbol_queue_size += 10;
3880       symbol_queue = xrealloc (symbol_queue,
3881 			       symbol_queue_size * sizeof (tree));
3882     }
3883 
3884   symbol_queue[symbol_queue_index++] = decl;
3885 }
3886 
3887 /* Free symbol queue.  */
3888 void
3889 debug_free_queue (void)
3890 {
3891   if (symbol_queue)
3892     {
3893       free (symbol_queue);
3894       symbol_queue = NULL;
3895       symbol_queue_size = 0;
3896     }
3897 }
3898 
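/* Illustrative sketch (not part of the original source): the three routines
   above implement a "queue now, emit later" pattern -- a growable array plus
   a nesting counter so that items queued during a flush are emitted by the
   same pass instead of triggering a recursive flush.  A minimal standalone
   version of the same pattern (all names hypothetical):  */

static void **demo_queue;
static int demo_queue_index;
static int demo_queue_size;
static int demo_nesting;

static void
demo_queue_item (void *item)
{
  if (demo_queue_index >= demo_queue_size)
    {
      demo_queue_size += 10;		/* Grow in small fixed steps.  */
      demo_queue = xrealloc (demo_queue, demo_queue_size * sizeof (void *));
    }
  demo_queue[demo_queue_index++] = item;
}

static void
demo_flush_queue (void (*emit) (void *))
{
  int i;

  ++demo_nesting;			/* Mirrors debug_nesting above.  */
  for (i = 0; i < demo_queue_index; ++i)
    emit (demo_queue[i]);		/* EMIT may queue further items.  */
  demo_queue_index = 0;
  --demo_nesting;
}
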
3899 /* Turn the RTL into assembly.  */
3900 static unsigned int
3901 rest_of_handle_final (void)
3902 {
3903   rtx x;
3904   const char *fnname;
3905 
3906   /* Get the function's name, as described by its RTL.  This may be
3907      different from the DECL_NAME name used in the source file.  */
3908 
3909   x = DECL_RTL (current_function_decl);
3910   gcc_assert (MEM_P (x));
3911   x = XEXP (x, 0);
3912   gcc_assert (GET_CODE (x) == SYMBOL_REF);
3913   fnname = XSTR (x, 0);
3914 
3915   assemble_start_function (current_function_decl, fnname);
3916   final_start_function (get_insns (), asm_out_file, optimize);
3917   final (get_insns (), asm_out_file, optimize);
3918   final_end_function ();
3919 
3920 #ifdef TARGET_UNWIND_INFO
3921   /* ??? The IA-64 ".handlerdata" directive must be issued before
3922      the ".endp" directive that closes the procedure descriptor.  */
3923   output_function_exception_table ();
3924 #endif
3925 
3926   assemble_end_function (current_function_decl, fnname);
3927 
3928 #ifndef TARGET_UNWIND_INFO
3929   /* Otherwise, it feels unclean to switch sections in the middle.  */
3930   output_function_exception_table ();
3931 #endif
3932 
3933   user_defined_section_attribute = false;
3934 
3935   if (! quiet_flag)
3936     fflush (asm_out_file);
3937 
3938   /* Release all memory allocated by flow.  */
3939   free_basic_block_vars ();
3940 
3941   /* Write DBX symbols if requested.  */
3942 
3943   /* Note that for those inline functions where we don't initially
3944      know for certain that we will be generating an out-of-line copy,
3945      the first invocation of this routine (rest_of_compilation) will
3946      skip over this code by doing a `goto exit_rest_of_compilation;'.
3947      Later on, wrapup_global_declarations will (indirectly) call
3948      rest_of_compilation again for those inline functions that need
3949      to have out-of-line copies generated.  During that call, we
3950      *will* be routed past here.  */
3951 
3952   timevar_push (TV_SYMOUT);
3953   (*debug_hooks->function_decl) (current_function_decl);
3954   timevar_pop (TV_SYMOUT);
3955   return 0;
3956 }
3957 
3958 struct tree_opt_pass pass_final =
3959 {
3960   NULL,                                 /* name */
3961   NULL,                                 /* gate */
3962   rest_of_handle_final,                 /* execute */
3963   NULL,                                 /* sub */
3964   NULL,                                 /* next */
3965   0,                                    /* static_pass_number */
3966   TV_FINAL,                             /* tv_id */
3967   0,                                    /* properties_required */
3968   0,                                    /* properties_provided */
3969   0,                                    /* properties_destroyed */
3970   0,                                    /* todo_flags_start */
3971   TODO_ggc_collect,                     /* todo_flags_finish */
3972   0                                     /* letter */
3973 };
3974 
3975 
3976 static unsigned int
3977 rest_of_handle_shorten_branches (void)
3978 {
3979   /* Shorten branches.  */
3980   shorten_branches (get_insns ());
3981   return 0;
3982 }
3983 
3984 struct tree_opt_pass pass_shorten_branches =
3985 {
3986   "shorten",                            /* name */
3987   NULL,                                 /* gate */
3988   rest_of_handle_shorten_branches,      /* execute */
3989   NULL,                                 /* sub */
3990   NULL,                                 /* next */
3991   0,                                    /* static_pass_number */
3992   TV_FINAL,                             /* tv_id */
3993   0,                                    /* properties_required */
3994   0,                                    /* properties_provided */
3995   0,                                    /* properties_destroyed */
3996   0,                                    /* todo_flags_start */
3997   TODO_dump_func,                       /* todo_flags_finish */
3998   0                                     /* letter */
3999 };
4000 
4001 
4002 static unsigned int
4003 rest_of_clean_state (void)
4004 {
4005   rtx insn, next;
4006 
4007   /* It is very important to decompose the RTL instruction chain here:
4008      debug information keeps pointing into CODE_LABEL insns inside the function
4009      body.  If these remain pointing to the other insns, we end up preserving
4010      the whole RTL chain and its attached detailed debug info in memory.  */
4011   for (insn = get_insns (); insn; insn = next)
4012     {
4013       next = NEXT_INSN (insn);
4014       NEXT_INSN (insn) = NULL;
4015       PREV_INSN (insn) = NULL;
4016     }
4017 
4018   /* In case the function was not output,
4019      don't leave any temporary anonymous types
4020      queued up for sdb output.  */
4021 #ifdef SDB_DEBUGGING_INFO
4022   if (write_symbols == SDB_DEBUG)
4023     sdbout_types (NULL_TREE);
4024 #endif
4025 
4026   reload_completed = 0;
4027   epilogue_completed = 0;
4028   flow2_completed = 0;
4029   no_new_pseudos = 0;
4030 #ifdef STACK_REGS
4031   regstack_completed = 0;
4032 #endif
4033 
4034   /* Clear out the insn_length contents now that they are no
4035      longer valid.  */
4036   init_insn_lengths ();
4037 
4038   /* Show no temporary slots allocated.  */
4039   init_temp_slots ();
4040 
4041   free_basic_block_vars ();
4042   free_bb_for_insn ();
4043 
4044 
4045   if (targetm.binds_local_p (current_function_decl))
4046     {
4047       int pref = cfun->preferred_stack_boundary;
4048       if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
4049         pref = cfun->stack_alignment_needed;
4050       cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4051         = pref;
4052     }
4053 
4054   /* Make sure volatile mem refs aren't considered valid operands for
4055      arithmetic insns.  We must call this here if this is a nested inline
4056      function, since the above code leaves us in the init_recog state,
4057      and the function context push/pop code does not save/restore volatile_ok.
4058 
4059      ??? Maybe it isn't necessary for expand_start_function to call this
4060      anymore if we do it here?  */
4061 
4062   init_recog_no_volatile ();
4063 
4064   /* We're done with this function.  Free up memory if we can.  */
4065   free_after_parsing (cfun);
4066   free_after_compilation (cfun);
4067   return 0;
4068 }
4069 
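/* Illustrative sketch (not part of the original source): the loop at the top
   of rest_of_clean_state severs both links of every insn, so that one live
   pointer into the chain (e.g. from debug info to a CODE_LABEL) cannot keep
   the entire chain and its attached data reachable.  The same idiom on a
   hypothetical doubly-linked node type:  */

struct demo_link
{
  struct demo_link *prev, *next;
};

static void
demo_sever_chain (struct demo_link *head)
{
  struct demo_link *node, *next;

  for (node = head; node; node = next)
    {
      next = node->next;	/* Remember the successor before cutting it off.  */
      node->next = NULL;
      node->prev = NULL;
    }
}
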
4070 struct tree_opt_pass pass_clean_state =
4071 {
4072   NULL,                                 /* name */
4073   NULL,                                 /* gate */
4074   rest_of_clean_state,                  /* execute */
4075   NULL,                                 /* sub */
4076   NULL,                                 /* next */
4077   0,                                    /* static_pass_number */
4078   TV_FINAL,                             /* tv_id */
4079   0,                                    /* properties_required */
4080   0,                                    /* properties_provided */
4081   PROP_rtl,                             /* properties_destroyed */
4082   0,                                    /* todo_flags_start */
4083   0,                                    /* todo_flags_finish */
4084   0                                     /* letter */
4085 };
4086 
4087