xref: /openbsd/gnu/usr.bin/gcc/gcc/config/alpha/alpha.c (revision e97b50d0)
1 /* Subroutines used for code generation on the DEC Alpha.
2    Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3    2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4    Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5 
6 This file is part of GNU CC.
7 
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12 
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING.  If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA.  */
22 
23 
24 #include "config.h"
25 #include "system.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "reload.h"
40 #include "obstack.h"
41 #include "except.h"
42 #include "function.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "integrate.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "debug.h"
50 #include "langhooks.h"
51 
52 /* Specify which cpu to schedule for.  */
53 
54 enum processor_type alpha_cpu;
55 static const char * const alpha_cpu_name[] =
56 {
57   "ev4", "ev5", "ev6"
58 };
59 
60 /* Specify how accurate floating-point traps need to be.  */
61 
62 enum alpha_trap_precision alpha_tp;
63 
64 /* Specify the floating-point rounding mode.  */
65 
66 enum alpha_fp_rounding_mode alpha_fprm;
67 
68 /* Specify which things cause traps.  */
69 
70 enum alpha_fp_trap_mode alpha_fptm;
71 
72 /* Specify bit size of immediate TLS offsets.  */
73 
74 int alpha_tls_size = 32;
75 
76 /* Strings decoded into the above options.  */
77 
78 const char *alpha_cpu_string;	/* -mcpu= */
79 const char *alpha_tune_string;	/* -mtune= */
80 const char *alpha_tp_string;	/* -mtrap-precision=[p|s|i] */
81 const char *alpha_fprm_string;	/* -mfp-rounding-mode=[n|m|c|d] */
82 const char *alpha_fptm_string;	/* -mfp-trap-mode=[n|u|su|sui] */
83 const char *alpha_mlat_string;	/* -mmemory-latency= */
84 const char *alpha_tls_size_string; /* -mtls-size=[16|32|64] */
85 
86 /* Save information from a "cmpxx" operation until the branch or scc is
87    emitted.  */
88 
89 struct alpha_compare alpha_compare;
90 
91 /* Nonzero if inside a function, because the Alpha assembler can't
92    handle .file directives inside functions.  */
93 
94 static int inside_function = FALSE;
95 
96 /* The number of cycles of latency we should assume on memory reads.  */
97 
98 int alpha_memory_latency = 3;
99 
100 /* Whether the function needs the GP.  */
101 
102 static int alpha_function_needs_gp;
103 
104 /* The alias set for prologue/epilogue register save/restore.  */
105 
106 static int alpha_sr_alias_set;
107 
108 /* The assembler name of the current function.  */
109 
110 static const char *alpha_fnname;
111 
112 /* The next explicit relocation sequence number.  */
113 int alpha_next_sequence_number = 1;
114 
115 /* The literal and gpdisp sequence numbers for this insn, as printed
116    by %# and %* respectively.  */
117 int alpha_this_literal_sequence_number;
118 int alpha_this_gpdisp_sequence_number;
119 
120 /* Declarations of static functions.  */
121 static int tls_symbolic_operand_1
122   PARAMS ((rtx, enum machine_mode, int, int));
123 static enum tls_model tls_symbolic_operand_type
124   PARAMS ((rtx));
125 static bool decl_in_text_section
126   PARAMS ((tree));
127 static bool alpha_in_small_data_p
128   PARAMS ((tree));
129 static void alpha_encode_section_info
130   PARAMS ((tree, int));
131 static const char *alpha_strip_name_encoding
132   PARAMS ((const char *));
133 static int some_small_symbolic_operand_1
134   PARAMS ((rtx *, void *));
135 static int split_small_symbolic_operand_1
136   PARAMS ((rtx *, void *));
137 static void alpha_set_memflags_1
138   PARAMS ((rtx, int, int, int));
139 static rtx alpha_emit_set_const_1
140   PARAMS ((rtx, enum machine_mode, HOST_WIDE_INT, int));
141 static void alpha_expand_unaligned_load_words
142   PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
143 static void alpha_expand_unaligned_store_words
144   PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
145 static void alpha_init_builtins
146   PARAMS ((void));
147 static rtx alpha_expand_builtin
148   PARAMS ((tree, rtx, rtx, enum machine_mode, int));
149 static void alpha_sa_mask
150   PARAMS ((unsigned long *imaskP, unsigned long *fmaskP));
151 static int find_lo_sum_using_gp
152   PARAMS ((rtx *, void *));
153 static int alpha_does_function_need_gp
154   PARAMS ((void));
155 static int alpha_ra_ever_killed
156   PARAMS ((void));
157 static const char *get_trap_mode_suffix
158   PARAMS ((void));
159 static const char *get_round_mode_suffix
160   PARAMS ((void));
161 static const char *get_some_local_dynamic_name
162   PARAMS ((void));
163 static int get_some_local_dynamic_name_1
164   PARAMS ((rtx *, void *));
165 static rtx set_frame_related_p
166   PARAMS ((void));
167 static const char *alpha_lookup_xfloating_lib_func
168   PARAMS ((enum rtx_code));
169 static int alpha_compute_xfloating_mode_arg
170   PARAMS ((enum rtx_code, enum alpha_fp_rounding_mode));
171 static void alpha_emit_xfloating_libcall
172   PARAMS ((const char *, rtx, rtx[], int, rtx));
173 static rtx alpha_emit_xfloating_compare
174   PARAMS ((enum rtx_code, rtx, rtx));
175 static void alpha_output_function_end_prologue
176   PARAMS ((FILE *));
177 static int alpha_adjust_cost
178   PARAMS ((rtx, rtx, rtx, int));
179 static int alpha_issue_rate
180   PARAMS ((void));
181 static int alpha_use_dfa_pipeline_interface
182   PARAMS ((void));
183 static int alpha_multipass_dfa_lookahead
184   PARAMS ((void));
185 
186 #ifdef OBJECT_FORMAT_ELF
187 static void alpha_elf_select_rtx_section
188   PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
189 #endif
190 
191 #if TARGET_ABI_OPEN_VMS
192 static bool alpha_linkage_symbol_p
193   PARAMS ((const char *symname));
194 static void alpha_write_linkage
195   PARAMS ((FILE *, const char *, tree));
196 #endif
197 
198 #if TARGET_ABI_OSF
199 static void alpha_output_mi_thunk_osf
200   PARAMS ((FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree));
201 #endif
202 
203 static struct machine_function * alpha_init_machine_status
204   PARAMS ((void));
205 
206 static void unicosmk_output_deferred_case_vectors PARAMS ((FILE *));
207 static void unicosmk_gen_dsib PARAMS ((unsigned long *imaskP));
208 static void unicosmk_output_ssib PARAMS ((FILE *, const char *));
209 static int unicosmk_need_dex PARAMS ((rtx));
210 
211 /* Get the number of args of a function in one of two ways.  */
212 #if TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK
213 #define NUM_ARGS current_function_args_info.num_args
214 #else
215 #define NUM_ARGS current_function_args_info
216 #endif
217 
218 #define REG_PV 27
219 #define REG_RA 26
220 
221 /* Initialize the GCC target structure.  */
222 #if TARGET_ABI_OPEN_VMS
223 const struct attribute_spec vms_attribute_table[];
224 static unsigned int vms_section_type_flags PARAMS ((tree, const char *, int));
225 static void vms_asm_named_section PARAMS ((const char *, unsigned int));
226 static void vms_asm_out_constructor PARAMS ((rtx, int));
227 static void vms_asm_out_destructor PARAMS ((rtx, int));
228 # undef TARGET_ATTRIBUTE_TABLE
229 # define TARGET_ATTRIBUTE_TABLE vms_attribute_table
230 # undef TARGET_SECTION_TYPE_FLAGS
231 # define TARGET_SECTION_TYPE_FLAGS vms_section_type_flags
232 #endif
233 
234 #undef TARGET_IN_SMALL_DATA_P
235 #define TARGET_IN_SMALL_DATA_P alpha_in_small_data_p
236 #undef TARGET_ENCODE_SECTION_INFO
237 #define TARGET_ENCODE_SECTION_INFO alpha_encode_section_info
238 #undef TARGET_STRIP_NAME_ENCODING
239 #define TARGET_STRIP_NAME_ENCODING alpha_strip_name_encoding
240 
241 #if TARGET_ABI_UNICOSMK
242 static void unicosmk_asm_named_section PARAMS ((const char *, unsigned int));
243 static void unicosmk_insert_attributes PARAMS ((tree, tree *));
244 static unsigned int unicosmk_section_type_flags PARAMS ((tree, const char *,
245 							 int));
246 static void unicosmk_unique_section PARAMS ((tree, int));
247 # undef TARGET_INSERT_ATTRIBUTES
248 # define TARGET_INSERT_ATTRIBUTES unicosmk_insert_attributes
249 # undef TARGET_SECTION_TYPE_FLAGS
250 # define TARGET_SECTION_TYPE_FLAGS unicosmk_section_type_flags
251 # undef TARGET_ASM_UNIQUE_SECTION
252 # define TARGET_ASM_UNIQUE_SECTION unicosmk_unique_section
253 # undef TARGET_ASM_GLOBALIZE_LABEL
254 # define TARGET_ASM_GLOBALIZE_LABEL hook_FILEptr_constcharptr_void
255 #endif
256 
257 #undef TARGET_ASM_ALIGNED_HI_OP
258 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
259 #undef TARGET_ASM_ALIGNED_DI_OP
260 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
261 
262 /* Default unaligned ops are provided for ELF systems.  To get unaligned
263    data for non-ELF systems, we have to turn off auto alignment.  */
264 #ifndef OBJECT_FORMAT_ELF
265 #undef TARGET_ASM_UNALIGNED_HI_OP
266 #define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t"
267 #undef TARGET_ASM_UNALIGNED_SI_OP
268 #define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t"
269 #undef TARGET_ASM_UNALIGNED_DI_OP
270 #define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t"
271 #endif
272 
273 #ifdef OBJECT_FORMAT_ELF
274 #undef	TARGET_ASM_SELECT_RTX_SECTION
275 #define	TARGET_ASM_SELECT_RTX_SECTION  alpha_elf_select_rtx_section
276 #endif
277 
278 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
279 #define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue
280 
281 #undef TARGET_SCHED_ADJUST_COST
282 #define TARGET_SCHED_ADJUST_COST alpha_adjust_cost
283 #undef TARGET_SCHED_ISSUE_RATE
284 #define TARGET_SCHED_ISSUE_RATE alpha_issue_rate
285 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
286 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE \
287   alpha_use_dfa_pipeline_interface
288 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
289 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
290   alpha_multipass_dfa_lookahead
291 
292 #undef TARGET_HAVE_TLS
293 #define TARGET_HAVE_TLS HAVE_AS_TLS
294 
295 #undef  TARGET_INIT_BUILTINS
296 #define TARGET_INIT_BUILTINS alpha_init_builtins
297 #undef  TARGET_EXPAND_BUILTIN
298 #define TARGET_EXPAND_BUILTIN alpha_expand_builtin
299 
300 #if TARGET_ABI_OSF
301 #undef TARGET_ASM_OUTPUT_MI_THUNK
302 #define TARGET_ASM_OUTPUT_MI_THUNK alpha_output_mi_thunk_osf
303 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
304 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
305 #endif
306 
307 struct gcc_target targetm = TARGET_INITIALIZER;
308 
309 /* Parse target option strings.  */
310 
311 void
312 override_options ()
313 {
314   int i;
315   static const struct cpu_table {
316     const char *const name;
317     const enum processor_type processor;
318     const int flags;
319   } cpu_table[] = {
320 #define EV5_MASK (MASK_CPU_EV5)
321 #define EV6_MASK (MASK_CPU_EV6|MASK_BWX|MASK_MAX|MASK_FIX)
322     { "ev4",	PROCESSOR_EV4, 0 },
323     { "ev45",	PROCESSOR_EV4, 0 },
324     { "21064",	PROCESSOR_EV4, 0 },
325     { "ev5",	PROCESSOR_EV5, EV5_MASK },
326     { "21164",	PROCESSOR_EV5, EV5_MASK },
327     { "ev56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
328     { "21164a",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
329     { "pca56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
330     { "21164PC",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
331     { "21164pc",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
332     { "ev6",	PROCESSOR_EV6, EV6_MASK },
333     { "21264",	PROCESSOR_EV6, EV6_MASK },
334     { "ev67",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
335     { "21264a",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
336     { 0, 0, 0 }
337   };
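  /* For illustration: -mcpu=ev56 maps to PROCESSOR_EV5 with
     EV5_MASK|MASK_BWX, i.e. the byte/word extension becomes available
     while MAX, FIX and CIX stay off; -mcpu=ev67 additionally enables
     MASK_CIX on top of EV6_MASK.  */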
338 
339   /* Unicos/Mk doesn't have shared libraries.  */
340   if (TARGET_ABI_UNICOSMK && flag_pic)
341     {
342       warning ("-f%s ignored for Unicos/Mk (not supported)",
343 	       (flag_pic > 1) ? "PIC" : "pic");
344       flag_pic = 0;
345     }
346 
347 #if defined(OPENBSD_NATIVE) || defined(OPENBSD_CROSS)
348   if (TARGET_FLOAT_VAX)
349     alpha_fprm = ALPHA_FPRM_NORM;
350   else
351     alpha_fprm = ALPHA_FPRM_DYN;
352 #else
353   /* On Unicos/Mk, the native compiler consistently generates /d suffixes for
354      floating-point instructions.  Make that the default for this target.  */
355   if (TARGET_ABI_UNICOSMK)
356     alpha_fprm = ALPHA_FPRM_DYN;
357   else
358     alpha_fprm = ALPHA_FPRM_NORM;
359 #endif
360 
361   alpha_tp = ALPHA_TP_PROG;
362   alpha_fptm = ALPHA_FPTM_N;
363 
364   /* We cannot use su and sui qualifiers for conversion instructions on
365      Unicos/Mk.  I'm not sure if this is due to assembler or hardware
366      limitations.  Right now, we issue a warning if -mieee is specified
367      and then ignore it; eventually, we should either get it right or
368      disable the option altogether.  */
369 
370   if (TARGET_IEEE)
371     {
372       if (TARGET_ABI_UNICOSMK)
373 	warning ("-mieee not supported on Unicos/Mk");
374       else
375 	{
376 	  alpha_tp = ALPHA_TP_INSN;
377 	  alpha_fptm = ALPHA_FPTM_SU;
378 	}
379     }
380 
381   if (TARGET_IEEE_WITH_INEXACT)
382     {
383       if (TARGET_ABI_UNICOSMK)
384 	warning ("-mieee-with-inexact not supported on Unicos/Mk");
385       else
386 	{
387 	  alpha_tp = ALPHA_TP_INSN;
388 	  alpha_fptm = ALPHA_FPTM_SUI;
389 	}
390     }
391 
392   if (alpha_tp_string)
393     {
394       if (! strcmp (alpha_tp_string, "p"))
395 	alpha_tp = ALPHA_TP_PROG;
396       else if (! strcmp (alpha_tp_string, "f"))
397 	alpha_tp = ALPHA_TP_FUNC;
398       else if (! strcmp (alpha_tp_string, "i"))
399 	alpha_tp = ALPHA_TP_INSN;
400       else
401 	error ("bad value `%s' for -mtrap-precision switch", alpha_tp_string);
402     }
403 
404   if (alpha_fprm_string)
405     {
406       if (! strcmp (alpha_fprm_string, "n"))
407 	alpha_fprm = ALPHA_FPRM_NORM;
408       else if (! strcmp (alpha_fprm_string, "m"))
409 	alpha_fprm = ALPHA_FPRM_MINF;
410       else if (! strcmp (alpha_fprm_string, "c"))
411 	alpha_fprm = ALPHA_FPRM_CHOP;
412       else if (! strcmp (alpha_fprm_string,"d"))
413 	alpha_fprm = ALPHA_FPRM_DYN;
414       else
415 	error ("bad value `%s' for -mfp-rounding-mode switch",
416 	       alpha_fprm_string);
417     }
418 
419   if (alpha_fptm_string)
420     {
421       if (strcmp (alpha_fptm_string, "n") == 0)
422 	alpha_fptm = ALPHA_FPTM_N;
423       else if (strcmp (alpha_fptm_string, "u") == 0)
424 	alpha_fptm = ALPHA_FPTM_U;
425       else if (strcmp (alpha_fptm_string, "su") == 0)
426 	alpha_fptm = ALPHA_FPTM_SU;
427       else if (strcmp (alpha_fptm_string, "sui") == 0)
428 	alpha_fptm = ALPHA_FPTM_SUI;
429       else
430 	error ("bad value `%s' for -mfp-trap-mode switch", alpha_fptm_string);
431     }
432 
433   if (alpha_tls_size_string)
434     {
435       if (strcmp (alpha_tls_size_string, "16") == 0)
436 	alpha_tls_size = 16;
437       else if (strcmp (alpha_tls_size_string, "32") == 0)
438 	alpha_tls_size = 32;
439       else if (strcmp (alpha_tls_size_string, "64") == 0)
440 	alpha_tls_size = 64;
441       else
442 	error ("bad value `%s' for -mtls-size switch", alpha_tls_size_string);
443     }
444 
445   alpha_cpu
446     = TARGET_CPU_DEFAULT & MASK_CPU_EV6 ? PROCESSOR_EV6
447       : (TARGET_CPU_DEFAULT & MASK_CPU_EV5 ? PROCESSOR_EV5 : PROCESSOR_EV4);
448 
449   if (alpha_cpu_string)
450     {
451       for (i = 0; cpu_table [i].name; i++)
452 	if (! strcmp (alpha_cpu_string, cpu_table [i].name))
453 	  {
454 	    alpha_cpu = cpu_table [i].processor;
455 	    target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX
456 			       | MASK_CPU_EV5 | MASK_CPU_EV6);
457 	    target_flags |= cpu_table [i].flags;
458 	    break;
459 	  }
460       if (! cpu_table [i].name)
461 	error ("bad value `%s' for -mcpu switch", alpha_cpu_string);
462     }
463 
464   if (alpha_tune_string)
465     {
466       for (i = 0; cpu_table [i].name; i++)
467 	if (! strcmp (alpha_tune_string, cpu_table [i].name))
468 	  {
469 	    alpha_cpu = cpu_table [i].processor;
470 	    break;
471 	  }
472       if (! cpu_table [i].name)
473 	error ("bad value `%s' for -mcpu switch", alpha_tune_string);
474     }
475 
476   /* Do some sanity checks on the above options.  */
477 
478   if (TARGET_ABI_UNICOSMK && alpha_fptm != ALPHA_FPTM_N)
479     {
480       warning ("trap mode not supported on Unicos/Mk");
481       alpha_fptm = ALPHA_FPTM_N;
482     }
483 
484   if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI)
485       && alpha_tp != ALPHA_TP_INSN && ! TARGET_CPU_EV6)
486     {
487       warning ("fp software completion requires -mtrap-precision=i");
488       alpha_tp = ALPHA_TP_INSN;
489     }
490 
491   if (TARGET_CPU_EV6)
492     {
493       /* Except for EV6 pass 1 (not released), we always have precise
494 	 arithmetic traps, which means we can do software completion
495 	 without minding trap shadows.  */
496       alpha_tp = ALPHA_TP_PROG;
497     }
498 
499   if (TARGET_FLOAT_VAX)
500     {
501       if (alpha_fprm == ALPHA_FPRM_MINF || alpha_fprm == ALPHA_FPRM_DYN)
502 	{
503 	  warning ("rounding mode not supported for VAX floats");
504 	  alpha_fprm = ALPHA_FPRM_NORM;
505 	}
506       if (alpha_fptm == ALPHA_FPTM_SUI)
507 	{
508 	  warning ("trap mode not supported for VAX floats");
509 	  alpha_fptm = ALPHA_FPTM_SU;
510 	}
511       if (target_flags_explicit & MASK_LONG_DOUBLE_128)
512 	warning ("128-bit long double not supported for VAX floats");
513       target_flags &= ~MASK_LONG_DOUBLE_128;
514     }
515 
516   {
517     char *end;
518     int lat;
519 
520     if (!alpha_mlat_string)
521       alpha_mlat_string = "L1";
522 
523     if (ISDIGIT ((unsigned char)alpha_mlat_string[0])
524 	&& (lat = strtol (alpha_mlat_string, &end, 10), *end == '\0'))
525       ;
526     else if ((alpha_mlat_string[0] == 'L' || alpha_mlat_string[0] == 'l')
527 	     && ISDIGIT ((unsigned char)alpha_mlat_string[1])
528 	     && alpha_mlat_string[2] == '\0')
529       {
530 	static int const cache_latency[][4] =
531 	{
532 	  { 3, 30, -1 },	/* ev4 -- Bcache is a guess */
533 	  { 2, 12, 38 },	/* ev5 -- Bcache from PC164 LMbench numbers */
534 	  { 3, 12, 30 },	/* ev6 -- Bcache from DS20 LMbench.  */
535 	};
536 
537 	lat = alpha_mlat_string[1] - '0';
538 	if (lat <= 0 || lat > 3 || cache_latency[alpha_cpu][lat-1] == -1)
539 	  {
540 	    warning ("L%d cache latency unknown for %s",
541 		     lat, alpha_cpu_name[alpha_cpu]);
542 	    lat = 3;
543 	  }
544 	else
545 	  lat = cache_latency[alpha_cpu][lat-1];
546       }
547     else if (! strcmp (alpha_mlat_string, "main"))
548       {
549 	/* Most current memories have about 370ns latency.  This is
550 	   a reasonable guess for a fast cpu.  */
551 	lat = 150;
552       }
553     else
554       {
555 	warning ("bad value `%s' for -mmemory-latency", alpha_mlat_string);
556 	lat = 3;
557       }
558 
559     alpha_memory_latency = lat;
560   }
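  /* A worked example of the above: -mmemory-latency=L2 on an ev5 picks
     cache_latency[PROCESSOR_EV5][1] == 12 cycles, while L3 on an ev4
     hits the -1 entry and falls back to the default of 3 with a
     warning.  */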
561 
562   /* Default the definition of "small data" to 8 bytes.  */
563   if (!g_switch_set)
564     g_switch_value = 8;
565 
566 #ifdef OPENBSD_NATIVE
567   /* Make -fpic behave as -fPIC unless -msmall-data is specified. */
568   if (flag_pic == 2 && TARGET_SMALL_DATA)
569     warning ("-fPIC used with -msmall-data");
570 #else
571   /* Infer TARGET_SMALL_DATA from -fpic/-fPIC.  */
572   if (flag_pic == 1)
573     target_flags |= MASK_SMALL_DATA;
574   else if (flag_pic == 2)
575     target_flags &= ~MASK_SMALL_DATA;
576 #endif
577 
578   /* Align labels and loops for optimal branching.  */
579   /* ??? Kludge these by not doing anything if we don't optimize and also if
580      we are writing ECOFF symbols to work around a bug in DEC's assembler.  */
581   if (optimize > 0 && write_symbols != SDB_DEBUG)
582     {
583       if (align_loops <= 0)
584 	align_loops = 16;
585       if (align_jumps <= 0)
586 	align_jumps = 16;
587     }
588   if (align_functions <= 0)
589     align_functions = 16;
590 
591   /* Acquire a unique set number for our register saves and restores.  */
592   alpha_sr_alias_set = new_alias_set ();
593 
594   /* Register variables and functions with the garbage collector.  */
595 
596   /* Set up function hooks.  */
597   init_machine_status = alpha_init_machine_status;
598 
599   /* Tell the compiler when we're using VAX floating point.  */
600   if (TARGET_FLOAT_VAX)
601     {
602       real_format_for_mode[SFmode - QFmode] = &vax_f_format;
603       real_format_for_mode[DFmode - QFmode] = &vax_g_format;
604       real_format_for_mode[TFmode - QFmode] = NULL;
605     }
606 }
607 
608 /* Returns 1 if VALUE is a mask that contains full bytes of zeros or ones.  */
609 
610 int
611 zap_mask (value)
612      HOST_WIDE_INT value;
613 {
614   int i;
615 
616   for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
617        i++, value >>= 8)
618     if ((value & 0xff) != 0 && (value & 0xff) != 0xff)
619       return 0;
620 
621   return 1;
622 }
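/* For illustration: zap_mask (0x00000000ffff0000) returns 1, since every
   byte of the value is 0x00 or 0xff, while zap_mask (0x0f00) returns 0
   because of the 0x0f byte.  Such masks are exactly the AND masks that a
   ZAP/ZAPNOT byte-select immediate can implement.  */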
623 
624 /* Returns 1 if OP is either the constant zero or a register.  If a
625    register, it must be in the proper mode unless MODE is VOIDmode.  */
626 
627 int
628 reg_or_0_operand (op, mode)
629       register rtx op;
630       enum machine_mode mode;
631 {
632   return op == CONST0_RTX (mode) || register_operand (op, mode);
633 }
634 
635 /* Return 1 if OP is a constant in the range of 0-63 (for a shift) or
636    any register.  */
637 
638 int
639 reg_or_6bit_operand (op, mode)
640      register rtx op;
641      enum machine_mode mode;
642 {
643   return ((GET_CODE (op) == CONST_INT
644 	   && (unsigned HOST_WIDE_INT) INTVAL (op) < 64)
645 	  || register_operand (op, mode));
646 }
647 
648 
649 /* Return 1 if OP is an 8-bit constant or any register.  */
650 
651 int
652 reg_or_8bit_operand (op, mode)
653      register rtx op;
654      enum machine_mode mode;
655 {
656   return ((GET_CODE (op) == CONST_INT
657 	   && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100)
658 	  || register_operand (op, mode));
659 }
660 
661 /* Return 1 if OP is a constant or any register.  */
662 
663 int
664 reg_or_const_int_operand (op, mode)
665      register rtx op;
666      enum machine_mode mode;
667 {
668   return GET_CODE (op) == CONST_INT || register_operand (op, mode);
669 }
670 
671 /* Return 1 if OP is an 8-bit constant.  */
672 
673 int
674 cint8_operand (op, mode)
675      register rtx op;
676      enum machine_mode mode ATTRIBUTE_UNUSED;
677 {
678   return ((GET_CODE (op) == CONST_INT
679 	   && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100));
680 }
681 
682 /* Return 1 if the operand is a valid second operand to an add insn.  */
683 
684 int
685 add_operand (op, mode)
686      register rtx op;
687      enum machine_mode mode;
688 {
689   if (GET_CODE (op) == CONST_INT)
690     /* Constraints I, J, O and P are covered by K.  */
691     return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')
692 	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
693 
694   return register_operand (op, mode);
695 }
696 
697 /* Return 1 if the operand is a valid second operand to a sign-extending
698    add insn.  */
699 
700 int
701 sext_add_operand (op, mode)
702      register rtx op;
703      enum machine_mode mode;
704 {
705   if (GET_CODE (op) == CONST_INT)
706     return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
707 	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'));
708 
709   return reg_not_elim_operand (op, mode);
710 }
711 
712 /* Return 1 if OP is the constant 4 or 8.  */
713 
714 int
715 const48_operand (op, mode)
716      register rtx op;
717      enum machine_mode mode ATTRIBUTE_UNUSED;
718 {
719   return (GET_CODE (op) == CONST_INT
720 	  && (INTVAL (op) == 4 || INTVAL (op) == 8));
721 }
722 
723 /* Return 1 if OP is a valid first operand to an AND insn.  */
724 
725 int
726 and_operand (op, mode)
727      register rtx op;
728      enum machine_mode mode;
729 {
730   if (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
731     return (zap_mask (CONST_DOUBLE_LOW (op))
732 	    && zap_mask (CONST_DOUBLE_HIGH (op)));
733 
734   if (GET_CODE (op) == CONST_INT)
735     return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
736 	    || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100
737 	    || zap_mask (INTVAL (op)));
738 
739   return register_operand (op, mode);
740 }
741 
742 /* Return 1 if OP is a valid first operand to an IOR or XOR insn.  */
743 
744 int
745 or_operand (op, mode)
746      register rtx op;
747      enum machine_mode mode;
748 {
749   if (GET_CODE (op) == CONST_INT)
750     return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
751 	    || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100);
752 
753   return register_operand (op, mode);
754 }
755 
756 /* Return 1 if OP is a constant that is the width, in bits, of an integral
757    mode smaller than DImode.  */
758 
759 int
760 mode_width_operand (op, mode)
761      register rtx op;
762      enum machine_mode mode ATTRIBUTE_UNUSED;
763 {
764   return (GET_CODE (op) == CONST_INT
765 	  && (INTVAL (op) == 8 || INTVAL (op) == 16
766 	      || INTVAL (op) == 32 || INTVAL (op) == 64));
767 }
768 
769 /* Return 1 if OP is a constant that is the all-ones mask of an integral
770    machine mode no wider than DImode.  */
771 
772 int
773 mode_mask_operand (op, mode)
774      register rtx op;
775      enum machine_mode mode ATTRIBUTE_UNUSED;
776 {
777   if (GET_CODE (op) == CONST_INT)
778     {
779       HOST_WIDE_INT value = INTVAL (op);
780 
781       if (value == 0xff)
782 	return 1;
783       if (value == 0xffff)
784 	return 1;
785       if (value == 0xffffffff)
786 	return 1;
787       if (value == -1)
788 	return 1;
789     }
790   else if (HOST_BITS_PER_WIDE_INT == 32 && GET_CODE (op) == CONST_DOUBLE)
791     {
792       if (CONST_DOUBLE_LOW (op) == 0xffffffff && CONST_DOUBLE_HIGH (op) == 0)
793 	return 1;
794     }
795 
796   return 0;
797 }
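/* For illustration: the masks accepted above are 0xff, 0xffff, 0xffffffff
   and -1, i.e. the all-ones masks of QImode, HImode, SImode and DImode
   respectively.  */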
798 
799 /* Return 1 if OP is a multiple of 8 less than 64.  */
800 
801 int
802 mul8_operand (op, mode)
803      register rtx op;
804      enum machine_mode mode ATTRIBUTE_UNUSED;
805 {
806   return (GET_CODE (op) == CONST_INT
807 	  && (unsigned HOST_WIDE_INT) INTVAL (op) < 64
808 	  && (INTVAL (op) & 7) == 0);
809 }
810 
811 /* Return 1 if OP is the zero constant for MODE.  */
812 
813 int
814 const0_operand (op, mode)
815      register rtx op;
816      enum machine_mode mode;
817 {
818   return op == CONST0_RTX (mode);
819 }
820 
821 /* Return 1 if OP is a hard floating-point register.  */
822 
823 int
824 hard_fp_register_operand (op, mode)
825      register rtx op;
826      enum machine_mode mode;
827 {
828   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
829     return 0;
830 
831   if (GET_CODE (op) == SUBREG)
832     op = SUBREG_REG (op);
833   return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == FLOAT_REGS;
834 }
835 
836 /* Return 1 if OP is a hard general register.  */
837 
838 int
839 hard_int_register_operand (op, mode)
840      register rtx op;
841      enum machine_mode mode;
842 {
843   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
844     return 0;
845 
846   if (GET_CODE (op) == SUBREG)
847     op = SUBREG_REG (op);
848   return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS;
849 }
850 
851 /* Return 1 if OP is a register or a constant integer.  */
852 
853 
854 int
855 reg_or_cint_operand (op, mode)
856     register rtx op;
857     enum machine_mode mode;
858 {
859   return (GET_CODE (op) == CONST_INT
860 	  || register_operand (op, mode));
861 }
862 
863 /* Return 1 if OP is something that can be reloaded into a register;
864    if it is a MEM, it need not be valid.  */
865 
866 int
867 some_operand (op, mode)
868      register rtx op;
869      enum machine_mode mode;
870 {
871   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
872     return 0;
873 
874   switch (GET_CODE (op))
875     {
876     case REG:
877     case MEM:
878     case CONST_INT:
879     case CONST_DOUBLE:
880     case CONST_VECTOR:
881     case LABEL_REF:
882     case SYMBOL_REF:
883     case CONST:
884     case HIGH:
885       return 1;
886 
887     case SUBREG:
888       return some_operand (SUBREG_REG (op), VOIDmode);
889 
890     default:
891       break;
892     }
893 
894   return 0;
895 }
896 
897 /* Likewise, but don't accept constants.  */
898 
899 int
900 some_ni_operand (op, mode)
901      register rtx op;
902      enum machine_mode mode;
903 {
904   if (GET_MODE (op) != mode && mode != VOIDmode)
905     return 0;
906 
907   if (GET_CODE (op) == SUBREG)
908     op = SUBREG_REG (op);
909 
910   return (GET_CODE (op) == REG || GET_CODE (op) == MEM);
911 }
912 
913 /* Return 1 if OP is a valid operand for the source of a move insn.  */
914 
915 int
916 input_operand (op, mode)
917      register rtx op;
918      enum machine_mode mode;
919 {
920   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
921     return 0;
922 
923   if (GET_MODE_CLASS (mode) == MODE_FLOAT && GET_MODE (op) != mode)
924     return 0;
925 
926   switch (GET_CODE (op))
927     {
928     case LABEL_REF:
929     case SYMBOL_REF:
930     case CONST:
931       if (TARGET_EXPLICIT_RELOCS)
932 	{
933 	  /* We don't split symbolic operands into something unintelligible
934 	     until after reload, but we do not wish non-small, non-global
935 	     symbolic operands to be reconstructed from their high/lo_sum
936 	     form.  */
937 	  return (small_symbolic_operand (op, mode)
938 		  || global_symbolic_operand (op, mode)
939 		  || gotdtp_symbolic_operand (op, mode)
940 		  || gottp_symbolic_operand (op, mode));
941 	}
942 
943       /* This handles both the Windows/NT and OSF cases.  */
944       return mode == ptr_mode || mode == DImode;
945 
946     case HIGH:
947       return (TARGET_EXPLICIT_RELOCS
948 	      && local_symbolic_operand (XEXP (op, 0), mode));
949 
950     case REG:
951     case ADDRESSOF:
952       return 1;
953 
954     case SUBREG:
955       if (register_operand (op, mode))
956 	return 1;
957       /* ... fall through ...  */
958     case MEM:
959       return ((TARGET_BWX || (mode != HImode && mode != QImode))
960 	      && general_operand (op, mode));
961 
962     case CONST_DOUBLE:
963     case CONST_VECTOR:
964       return op == CONST0_RTX (mode);
965 
966     case CONST_INT:
967       return mode == QImode || mode == HImode || add_operand (op, mode);
968 
969     case CONSTANT_P_RTX:
970       return 1;
971 
972     default:
973       break;
974     }
975 
976   return 0;
977 }
978 
979 /* Return 1 if OP is a SYMBOL_REF for a function known to be in this
980    file, and in the same section as the current function.  */
981 
982 int
983 current_file_function_operand (op, mode)
984      rtx op;
985      enum machine_mode mode ATTRIBUTE_UNUSED;
986 {
987   if (GET_CODE (op) != SYMBOL_REF)
988     return 0;
989 
990   /* Easy test for recursion.  */
991   if (op == XEXP (DECL_RTL (current_function_decl), 0))
992     return 1;
993 
994   /* Otherwise, we need the DECL for the SYMBOL_REF, which we can't get.
995      SYMBOL_REF_FLAG has therefore been declared to imply that the function
996      is in the default text section, so we must also check that the
997      current function is in the text section.  */
998   if (SYMBOL_REF_FLAG (op) && decl_in_text_section (current_function_decl))
999     return 1;
1000 
1001   return 0;
1002 }
1003 
1004 /* Return 1 if OP is a SYMBOL_REF for which we can make a call via bsr.  */
1005 
1006 int
1007 direct_call_operand (op, mode)
1008      rtx op;
1009      enum machine_mode mode;
1010 {
1011   /* Must be defined in this file.  */
1012   if (! current_file_function_operand (op, mode))
1013     return 0;
1014 
1015   /* If profiling is implemented via linker tricks, we can't jump
1016      to the nogp alternate entry point.  */
1017   /* ??? TARGET_PROFILING_NEEDS_GP isn't really the right test,
1018      but is approximately correct for the OSF ABIs.  Don't know
1019      what to do for VMS, NT, or UMK.  */
1020   if (! TARGET_PROFILING_NEEDS_GP
1021       && current_function_profile)
1022     return 0;
1023 
1024   return 1;
1025 }
1026 
1027 /* Return true if OP is a LABEL_REF, or SYMBOL_REF or CONST referencing
1028    a (non-tls) variable known to be defined in this file.  */
1029 
1030 int
1031 local_symbolic_operand (op, mode)
1032      rtx op;
1033      enum machine_mode mode;
1034 {
1035   const char *str;
1036 
1037   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1038     return 0;
1039 
1040   if (GET_CODE (op) == LABEL_REF)
1041     return 1;
1042 
1043   if (GET_CODE (op) == CONST
1044       && GET_CODE (XEXP (op, 0)) == PLUS
1045       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
1046     op = XEXP (XEXP (op, 0), 0);
1047 
1048   if (GET_CODE (op) != SYMBOL_REF)
1049     return 0;
1050 
1051   /* Easy pickings.  */
1052   if (CONSTANT_POOL_ADDRESS_P (op) || STRING_POOL_ADDRESS_P (op))
1053     return 1;
1054 
1055   /* ??? SYMBOL_REF_FLAG is set for local function symbols, but we
1056      run into problems with the rtl inliner in that the symbol was
1057      once external, but is local after inlining, which results in
1058      unrecognizable insns.  */
1059 
1060   str = XSTR (op, 0);
1061 
1062   /* If @[LS], then alpha_encode_section_info sez it's local.  */
1063   if (str[0] == '@' && (str[1] == 'L' || str[1] == 'S'))
1064     return 1;
1065 
1066   /* If *$, then ASM_GENERATE_INTERNAL_LABEL sez it's local.  */
1067   if (str[0] == '*' && str[1] == '$')
1068     return 1;
1069 
1070   return 0;
1071 }
1072 
1073 /* Return true if OP is a SYMBOL_REF or CONST referencing a variable
1074    known to be defined in this file in the small data area.  */
1075 
1076 int
1077 small_symbolic_operand (op, mode)
1078      rtx op;
1079      enum machine_mode mode ATTRIBUTE_UNUSED;
1080 {
1081   const char *str;
1082 
1083   if (! TARGET_SMALL_DATA)
1084     return 0;
1085 
1086   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1087     return 0;
1088 
1089   if (GET_CODE (op) == CONST
1090       && GET_CODE (XEXP (op, 0)) == PLUS
1091       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
1092     op = XEXP (XEXP (op, 0), 0);
1093 
1094   if (GET_CODE (op) != SYMBOL_REF)
1095     return 0;
1096 
1097   if (CONSTANT_POOL_ADDRESS_P (op))
1098     return GET_MODE_SIZE (get_pool_mode (op)) <= (unsigned) g_switch_value;
1099   else
1100     {
1101       str = XSTR (op, 0);
1102       return str[0] == '@' && str[1] == 'S';
1103     }
1104 }
1105 
1106 /* Return true if OP is a SYMBOL_REF or CONST referencing a variable
1107    not known (or known not) to be defined in this file.  */
1108 
1109 int
1110 global_symbolic_operand (op, mode)
1111      rtx op;
1112      enum machine_mode mode;
1113 {
1114   const char *str;
1115 
1116   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1117     return 0;
1118 
1119   if (GET_CODE (op) == CONST
1120       && GET_CODE (XEXP (op, 0)) == PLUS
1121       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
1122     op = XEXP (XEXP (op, 0), 0);
1123 
1124   if (GET_CODE (op) != SYMBOL_REF)
1125     return 0;
1126 
1127   if (local_symbolic_operand (op, mode))
1128     return 0;
1129 
1130   /* Also verify that it's not a TLS symbol.  */
1131   str = XSTR (op, 0);
1132   return str[0] != '%' && str[0] != '@';
1133 }
1134 
1135 /* Return 1 if OP is a valid operand for the MEM of a CALL insn.  */
1136 
1137 int
1138 call_operand (op, mode)
1139      rtx op;
1140      enum machine_mode mode;
1141 {
1142   if (mode != Pmode)
1143     return 0;
1144 
1145   if (GET_CODE (op) == REG)
1146     {
1147       if (TARGET_ABI_OSF)
1148 	{
1149 	  /* Disallow virtual registers to cope with pathological test cases
1150 	     such as compile/930117-1.c in which the virtual reg decomposes
1151 	     to the frame pointer.  Which is a hard reg that is not $27.  */
1152 	  return (REGNO (op) == 27 || REGNO (op) > LAST_VIRTUAL_REGISTER);
1153 	}
1154       else
1155 	return 1;
1156     }
1157   if (TARGET_ABI_UNICOSMK)
1158     return 0;
1159   if (GET_CODE (op) == SYMBOL_REF)
1160     return 1;
1161 
1162   return 0;
1163 }
1164 
1165 /* Returns 1 if OP is a symbolic operand, i.e. a symbol_ref or a label_ref,
1166    possibly with an offset.  */
1167 
1168 int
1169 symbolic_operand (op, mode)
1170       register rtx op;
1171       enum machine_mode mode;
1172 {
1173   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1174     return 0;
1175   if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1176     return 1;
1177   if (GET_CODE (op) == CONST
1178       && GET_CODE (XEXP (op,0)) == PLUS
1179       && GET_CODE (XEXP (XEXP (op,0), 0)) == SYMBOL_REF
1180       && GET_CODE (XEXP (XEXP (op,0), 1)) == CONST_INT)
1181     return 1;
1182   return 0;
1183 }
1184 
1185 /* Return true if OP is valid for a particular TLS relocation.  */
1186 
1187 static int
1188 tls_symbolic_operand_1 (op, mode, size, unspec)
1189      rtx op;
1190      enum machine_mode mode;
1191      int size, unspec;
1192 {
1193   const char *str;
1194   int letter;
1195 
1196   if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1197     return 0;
1198 
1199   if (GET_CODE (op) != CONST)
1200     return 0;
1201   op = XEXP (op, 0);
1202 
1203   if (GET_CODE (op) != UNSPEC || XINT (op, 1) != unspec)
1204     return 0;
1205   op = XVECEXP (op, 0, 0);
1206 
1207   if (GET_CODE (op) != SYMBOL_REF)
1208     return 0;
1209   str = XSTR (op, 0);
1210 
1211   if (str[0] == '%')
1212     {
1213       if (size != 64)
1214 	return 0;
1215     }
1216   else if (str[0] == '@')
1217     {
1218       if (alpha_tls_size > size)
1219 	return 0;
1220     }
1221   else
1222     return 0;
1223 
1224   if (unspec == UNSPEC_DTPREL)
1225     return str[1] == 'D';
1226   else if (str[1] == 'I')
1227     return size == 64;
1228   else
1229     return str[1] == 'T';
1230 }
1231 
1232 /* Return true if OP is valid for 16-bit DTP relative relocations.  */
1233 
1234 int
1235 dtp16_symbolic_operand (op, mode)
1236       rtx op;
1237       enum machine_mode mode;
1238 {
1239   return tls_symbolic_operand_1 (op, mode, 16, UNSPEC_DTPREL);
1240 }
1241 
1242 /* Return true if OP is valid for 32-bit DTP relative relocations.  */
1243 
1244 int
1245 dtp32_symbolic_operand (op, mode)
1246       rtx op;
1247       enum machine_mode mode;
1248 {
1249   return tls_symbolic_operand_1 (op, mode, 32, UNSPEC_DTPREL);
1250 }
1251 
1252 /* Return true if OP is valid for 64-bit DTP relative relocations.  */
1253 
1254 int
1255 gotdtp_symbolic_operand (op, mode)
1256       rtx op;
1257       enum machine_mode mode;
1258 {
1259   return tls_symbolic_operand_1 (op, mode, 64, UNSPEC_DTPREL);
1260 }
1261 
1262 /* Return true if OP is valid for 16-bit TP relative relocations.  */
1263 
1264 int
1265 tp16_symbolic_operand (op, mode)
1266       rtx op;
1267       enum machine_mode mode;
1268 {
1269   return tls_symbolic_operand_1 (op, mode, 16, UNSPEC_TPREL);
1270 }
1271 
1272 /* Return true if OP is valid for 32-bit TP relative relocations.  */
1273 
1274 int
1275 tp32_symbolic_operand (op, mode)
1276       rtx op;
1277       enum machine_mode mode;
1278 {
1279   return tls_symbolic_operand_1 (op, mode, 32, UNSPEC_TPREL);
1280 }
1281 
1282 /* Return true if OP is valid for 64-bit TP relative relocations.  */
1283 
1284 int
1285 gottp_symbolic_operand (op, mode)
1286       rtx op;
1287       enum machine_mode mode;
1288 {
1289   return tls_symbolic_operand_1 (op, mode, 64, UNSPEC_TPREL);
1290 }
1291 
1292 /* Return 1 if OP is a valid Alpha comparison operator.  Here we know which
1293    comparisons are valid in which insn.  */
1294 
1295 int
1296 alpha_comparison_operator (op, mode)
1297      register rtx op;
1298      enum machine_mode mode;
1299 {
1300   enum rtx_code code = GET_CODE (op);
1301 
1302   if (mode != GET_MODE (op) && mode != VOIDmode)
1303     return 0;
1304 
1305   return (code == EQ || code == LE || code == LT
1306 	  || code == LEU || code == LTU);
1307 }
1308 
1309 /* Return 1 if OP is a valid Alpha comparison operator against zero.
1310    Here we know which comparisons are valid in which insn.  */
1311 
1312 int
1313 alpha_zero_comparison_operator (op, mode)
1314      register rtx op;
1315      enum machine_mode mode;
1316 {
1317   enum rtx_code code = GET_CODE (op);
1318 
1319   if (mode != GET_MODE (op) && mode != VOIDmode)
1320     return 0;
1321 
1322   return (code == EQ || code == NE || code == LE || code == LT
1323 	  || code == LEU || code == LTU);
1324 }
1325 
1326 /* Return 1 if OP is a valid Alpha swapped comparison operator.  */
1327 
1328 int
1329 alpha_swapped_comparison_operator (op, mode)
1330      register rtx op;
1331      enum machine_mode mode;
1332 {
1333   enum rtx_code code = GET_CODE (op);
1334 
1335   if ((mode != GET_MODE (op) && mode != VOIDmode)
1336       || GET_RTX_CLASS (code) != '<')
1337     return 0;
1338 
1339   code = swap_condition (code);
1340   return (code == EQ || code == LE || code == LT
1341 	  || code == LEU || code == LTU);
1342 }
1343 
1344 /* Return 1 if OP is a signed comparison operation.  */
1345 
1346 int
1347 signed_comparison_operator (op, mode)
1348      register rtx op;
1349      enum machine_mode mode ATTRIBUTE_UNUSED;
1350 {
1351   enum rtx_code code = GET_CODE (op);
1352 
1353   if (mode != GET_MODE (op) && mode != VOIDmode)
1354     return 0;
1355 
1356   return (code == EQ || code == NE
1357 	  || code == LE || code == LT
1358 	  || code == GE || code == GT);
1359 }
1360 
1361 /* Return 1 if OP is a valid Alpha floating point comparison operator.
1362    Here we know which comparisons are valid in which insn.  */
1363 
1364 int
1365 alpha_fp_comparison_operator (op, mode)
1366      register rtx op;
1367      enum machine_mode mode;
1368 {
1369   enum rtx_code code = GET_CODE (op);
1370 
1371   if (mode != GET_MODE (op) && mode != VOIDmode)
1372     return 0;
1373 
1374   return (code == EQ || code == LE || code == LT || code == UNORDERED);
1375 }
1376 
1377 /* Return 1 if this is a divide or modulus operator.  */
1378 
1379 int
1380 divmod_operator (op, mode)
1381      register rtx op;
1382      enum machine_mode mode ATTRIBUTE_UNUSED;
1383 {
1384   switch (GET_CODE (op))
1385     {
1386     case DIV:  case MOD:  case UDIV:  case UMOD:
1387       return 1;
1388 
1389     default:
1390       break;
1391     }
1392 
1393   return 0;
1394 }
1395 
1396 /* Return 1 if this memory address is a known aligned register plus
1397    a constant.  It must be a valid address.  This means that we can do
1398    this as an aligned reference plus some offset.
1399 
1400    Take into account what reload will do.  */
1401 
1402 int
1403 aligned_memory_operand (op, mode)
1404      register rtx op;
1405      enum machine_mode mode;
1406 {
1407   rtx base;
1408 
1409   if (reload_in_progress)
1410     {
1411       rtx tmp = op;
1412       if (GET_CODE (tmp) == SUBREG)
1413 	tmp = SUBREG_REG (tmp);
1414       if (GET_CODE (tmp) == REG
1415 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
1416 	{
1417 	  op = reg_equiv_memory_loc[REGNO (tmp)];
1418 	  if (op == 0)
1419 	    return 0;
1420 	}
1421     }
1422 
1423   if (GET_CODE (op) != MEM
1424       || GET_MODE (op) != mode)
1425     return 0;
1426   op = XEXP (op, 0);
1427 
1428   /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
1429      sorts of constructs.  Dig for the real base register.  */
1430   if (reload_in_progress
1431       && GET_CODE (op) == PLUS
1432       && GET_CODE (XEXP (op, 0)) == PLUS)
1433     base = XEXP (XEXP (op, 0), 0);
1434   else
1435     {
1436       if (! memory_address_p (mode, op))
1437 	return 0;
1438       base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
1439     }
1440 
1441   return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) >= 32);
1442 }
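/* For illustration (the instruction names are a sketch, not taken from
   this file): an SImode MEM based on a register whose REGNO_POINTER_ALIGN
   is at least 32 bits qualifies as aligned here, so it can be accessed
   with a plain ldl/stl instead of the unaligned ldq_u/extll sequence.  */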
1443 
1444 /* Similar, but return 1 if OP is a MEM which is not alignable.  */
1445 
1446 int
1447 unaligned_memory_operand (op, mode)
1448      register rtx op;
1449      enum machine_mode mode;
1450 {
1451   rtx base;
1452 
1453   if (reload_in_progress)
1454     {
1455       rtx tmp = op;
1456       if (GET_CODE (tmp) == SUBREG)
1457 	tmp = SUBREG_REG (tmp);
1458       if (GET_CODE (tmp) == REG
1459 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
1460 	{
1461 	  op = reg_equiv_memory_loc[REGNO (tmp)];
1462 	  if (op == 0)
1463 	    return 0;
1464 	}
1465     }
1466 
1467   if (GET_CODE (op) != MEM
1468       || GET_MODE (op) != mode)
1469     return 0;
1470   op = XEXP (op, 0);
1471 
1472   /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
1473      sorts of constructs.  Dig for the real base register.  */
1474   if (reload_in_progress
1475       && GET_CODE (op) == PLUS
1476       && GET_CODE (XEXP (op, 0)) == PLUS)
1477     base = XEXP (XEXP (op, 0), 0);
1478   else
1479     {
1480       if (! memory_address_p (mode, op))
1481 	return 0;
1482       base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
1483     }
1484 
1485   return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) < 32);
1486 }
1487 
1488 /* Return 1 if OP is either a register or an unaligned memory location.  */
1489 
1490 int
1491 reg_or_unaligned_mem_operand (op, mode)
1492      rtx op;
1493      enum machine_mode mode;
1494 {
1495   return register_operand (op, mode) || unaligned_memory_operand (op, mode);
1496 }
1497 
1498 /* Return 1 if OP is any memory location.  During reload a pseudo matches.  */
1499 
1500 int
1501 any_memory_operand (op, mode)
1502      register rtx op;
1503      enum machine_mode mode ATTRIBUTE_UNUSED;
1504 {
1505   return (GET_CODE (op) == MEM
1506 	  || (GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == REG)
1507 	  || (reload_in_progress && GET_CODE (op) == REG
1508 	      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1509 	  || (reload_in_progress && GET_CODE (op) == SUBREG
1510 	      && GET_CODE (SUBREG_REG (op)) == REG
1511 	      && REGNO (SUBREG_REG (op)) >= FIRST_PSEUDO_REGISTER));
1512 }
1513 
1514 /* Returns 1 if OP is not an eliminable register.
1515 
1516    This exists to cure a pathological abort in the s8addq (et al) patterns,
1517 
1518 	long foo () { long t; bar(); return (long) &t * 26107; }
1519 
1520    which run afoul of a hack in reload to cure a (presumably) similar
1521    problem with lea-type instructions on other targets.  But there is
1522    one of us and many of them, so work around the problem by selectively
1523    preventing combine from making the optimization.  */
1524 
1525 int
1526 reg_not_elim_operand (op, mode)
1527       register rtx op;
1528       enum machine_mode mode;
1529 {
1530   rtx inner = op;
1531   if (GET_CODE (op) == SUBREG)
1532     inner = SUBREG_REG (op);
1533   if (inner == frame_pointer_rtx || inner == arg_pointer_rtx)
1534     return 0;
1535 
1536   return register_operand (op, mode);
1537 }
1538 
1539 /* Return 1 if OP is a memory location that is not a reference (using
1540    an AND) to an unaligned location.  Take into account what reload
1541    will do.  */
1542 
1543 int
1544 normal_memory_operand (op, mode)
1545      register rtx op;
1546      enum machine_mode mode ATTRIBUTE_UNUSED;
1547 {
1548   if (reload_in_progress)
1549     {
1550       rtx tmp = op;
1551       if (GET_CODE (tmp) == SUBREG)
1552 	tmp = SUBREG_REG (tmp);
1553       if (GET_CODE (tmp) == REG
1554 	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
1555 	{
1556 	  op = reg_equiv_memory_loc[REGNO (tmp)];
1557 
1558 	  /* This may not have been assigned an equivalent address if it will
1559 	     be eliminated.  In that case, it doesn't matter what we do.  */
1560 	  if (op == 0)
1561 	    return 1;
1562 	}
1563     }
1564 
1565   return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) != AND;
1566 }
1567 
1568 /* Accept a register, but not a subreg of any kind.  This allows us to
1569    avoid pathological cases in reload wrt data movement common in
1570    int->fp conversion.  */
1571 
1572 int
1573 reg_no_subreg_operand (op, mode)
1574      register rtx op;
1575      enum machine_mode mode;
1576 {
1577   if (GET_CODE (op) != REG)
1578     return 0;
1579   return register_operand (op, mode);
1580 }
1581 
1582 /* Recognize an addition operation that includes a constant.  Used to
1583    convince reload to canonize (plus (plus reg c1) c2) during register
1584    elimination.  */
1585 
1586 int
1587 addition_operation (op, mode)
1588      register rtx op;
1589      enum machine_mode mode;
1590 {
1591   if (GET_MODE (op) != mode && mode != VOIDmode)
1592     return 0;
1593   if (GET_CODE (op) == PLUS
1594       && register_operand (XEXP (op, 0), mode)
1595       && GET_CODE (XEXP (op, 1)) == CONST_INT
1596       && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op, 1)), 'K'))
1597     return 1;
1598   return 0;
1599 }
1600 
1601 /* Implements CONST_OK_FOR_LETTER_P.  Return true if the value matches
1602    the range defined for C in [I-P].  */
1603 
1604 bool
1605 alpha_const_ok_for_letter_p (value, c)
1606      HOST_WIDE_INT value;
1607      int c;
1608 {
1609   switch (c)
1610     {
1611     case 'I':
1612       /* An unsigned 8 bit constant.  */
1613       return (unsigned HOST_WIDE_INT) value < 0x100;
1614     case 'J':
1615       /* The constant zero.  */
1616       return value == 0;
1617     case 'K':
1618       /* A signed 16 bit constant.  */
1619       return (unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000;
1620     case 'L':
1621       /* A shifted signed 16 bit constant appropriate for LDAH.  */
1622       return ((value & 0xffff) == 0
1623               && ((value) >> 31 == -1 || value >> 31 == 0));
1624     case 'M':
1625       /* A constant that can be ANDed with a register using a ZAP insn.  */
1626       return zap_mask (value);
1627     case 'N':
1628       /* A complemented unsigned 8 bit constant.  */
1629       return (unsigned HOST_WIDE_INT) (~ value) < 0x100;
1630     case 'O':
1631       /* A negated unsigned 8 bit constant.  */
1632       return (unsigned HOST_WIDE_INT) (- value) < 0x100;
1633     case 'P':
1634       /* The constant 1, 2 or 3.  */
1635       return value == 1 || value == 2 || value == 3;
1636 
1637     default:
1638       return false;
1639     }
1640 }
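/* Worked examples for the letters above: 255 satisfies 'I', -32768
   satisfies 'K' (-32768 + 0x8000 == 0 < 0x10000), and 0x7fff0000
   satisfies 'L' since its low 16 bits are zero and value >> 31 is 0.  */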
1641 
1642 /* Implements CONST_DOUBLE_OK_FOR_LETTER_P.  Return true if VALUE
1643    matches for C in [GH].  */
1644 
1645 bool
1646 alpha_const_double_ok_for_letter_p (value, c)
1647      rtx value;
1648      int c;
1649 {
1650   switch (c)
1651     {
1652     case 'G':
1653       /* The floating point zero constant.  */
1654       return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
1655 	      && value == CONST0_RTX (GET_MODE (value)));
1656 
1657     case 'H':
1658       /* A valid operand of a ZAP insn.  */
1659       return (GET_MODE (value) == VOIDmode
1660 	      && zap_mask (CONST_DOUBLE_LOW (value))
1661 	      && zap_mask (CONST_DOUBLE_HIGH (value)));
1662 
1663     default:
1664       return false;
1665     }
1666 }
1667 
1668 /* Implements EXTRA_CONSTRAINT.  Return true if VALUE
1669    matches for C.  */
1670 
1671 bool
1672 alpha_extra_constraint (value, c)
1673      rtx value;
1674      int c;
1675 {
1676   switch (c)
1677     {
1678     case 'Q':
1679       return normal_memory_operand (value, VOIDmode);
1680     case 'R':
1681       return direct_call_operand (value, Pmode);
1682     case 'S':
1683       return (GET_CODE (value) == CONST_INT
1684 	      && (unsigned HOST_WIDE_INT) INTVAL (value) < 64);
1685     case 'T':
1686       return GET_CODE (value) == HIGH;
1687     case 'U':
1688       return TARGET_ABI_UNICOSMK && symbolic_operand (value, VOIDmode);
1689     case 'W':
1690       return (GET_CODE (value) == CONST_VECTOR
1691 	      && value == CONST0_RTX (GET_MODE (value)));
1692     default:
1693       return false;
1694     }
1695 }
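/* For illustration: 'S' accepts the shift counts 0 through 63, 'T'
   accepts only a HIGH rtx such as the ones built for explicit
   relocations, and 'W' accepts the all-zero vector constant.  */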
1696 
1697 /* Return 1 if this function can directly return via $26.  */
1698 
1699 int
1700 direct_return ()
1701 {
1702   return (! TARGET_ABI_OPEN_VMS && ! TARGET_ABI_UNICOSMK
1703 	  && reload_completed
1704 	  && alpha_sa_size () == 0
1705 	  && get_frame_size () == 0
1706 	  && current_function_outgoing_args_size == 0
1707 	  && current_function_pretend_args_size == 0);
1708 }
1709 
1710 /* Return the ADDR_VEC associated with a tablejump insn.  */
1711 
1712 rtx
1713 alpha_tablejump_addr_vec (insn)
1714      rtx insn;
1715 {
1716   rtx tmp;
1717 
1718   tmp = JUMP_LABEL (insn);
1719   if (!tmp)
1720     return NULL_RTX;
1721   tmp = NEXT_INSN (tmp);
1722   if (!tmp)
1723     return NULL_RTX;
1724   if (GET_CODE (tmp) == JUMP_INSN
1725       && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
1726     return PATTERN (tmp);
1727   return NULL_RTX;
1728 }
1729 
1730 /* Return the label of the predicted edge, or CONST0_RTX if we don't know.  */
1731 
1732 rtx
1733 alpha_tablejump_best_label (insn)
1734      rtx insn;
1735 {
1736   rtx jump_table = alpha_tablejump_addr_vec (insn);
1737   rtx best_label = NULL_RTX;
1738 
1739   /* ??? Once the CFG doesn't keep getting completely rebuilt, look
1740      there for edge frequency counts from profile data.  */
1741 
1742   if (jump_table)
1743     {
1744       int n_labels = XVECLEN (jump_table, 1);
1745       int best_count = -1;
1746       int i, j;
1747 
1748       for (i = 0; i < n_labels; i++)
1749 	{
1750 	  int count = 1;
1751 
1752 	  for (j = i + 1; j < n_labels; j++)
1753 	    if (XEXP (XVECEXP (jump_table, 1, i), 0)
1754 		== XEXP (XVECEXP (jump_table, 1, j), 0))
1755 	      count++;
1756 
1757 	  if (count > best_count)
1758 	    best_count = count, best_label = XVECEXP (jump_table, 1, i);
1759 	}
1760     }
1761 
1762   return best_label ? best_label : const0_rtx;
1763 }
1764 
1765 /* Return the TLS model to use for SYMBOL.  */
1766 
1767 static enum tls_model
1768 tls_symbolic_operand_type (symbol)
1769      rtx symbol;
1770 {
1771   const char *str;
1772 
1773   if (GET_CODE (symbol) != SYMBOL_REF)
1774     return 0;
1775   str = XSTR (symbol, 0);
1776 
1777   if (str[0] == '%')
1778     {
1779       /* ??? Be prepared for -ftls-model=local-dynamic.  Perhaps we shouldn't
1780 	 have separately encoded local-ness.  On well, maybe the user will use
1781 	 attribute visibility next time.  At least we don't crash...  */
1782       if (str[1] == 'G' || str[1] == 'D')
1783 	return TLS_MODEL_GLOBAL_DYNAMIC;
1784       if (str[1] == 'I' || str[1] == 'T')
1785 	return TLS_MODEL_INITIAL_EXEC;
1786     }
1787   else if (str[0] == '@')
1788     {
1789       if (str[1] == 'D')
1790 	{
1791 	  /* Local dynamic is a waste if we're not going to combine
1792 	     the __tls_get_addr calls.  So avoid it if not optimizing.  */
1793 	  if (optimize)
1794 	    return TLS_MODEL_LOCAL_DYNAMIC;
1795 	  else
1796 	    return TLS_MODEL_GLOBAL_DYNAMIC;
1797 	}
1798       if (str[1] == 'I')
1799 	return TLS_MODEL_INITIAL_EXEC;
1800       if (str[1] == 'T')
1801 	{
1802 	  /* 64-bit local exec is the same as initial exec except without
1803 	     the dynamic relocation.  In either case we use a GOT entry.  */
1804 	  if (alpha_tls_size == 64)
1805 	    return TLS_MODEL_INITIAL_EXEC;
1806 	  else
1807 	    return TLS_MODEL_LOCAL_EXEC;
1808 	}
1809     }
1810 
1811   return 0;
1812 }
1813 
1814 
1815 /* Return true if the function DECL will be placed in the default text
1816    section.  */
1817 /* ??? Ideally we'd be able to always move from a SYMBOL_REF back to the
1818    decl, as that would allow us to determine if two functions are in the
1819    same section, which is what we really want to know.  */
1820 
1821 static bool
1822 decl_in_text_section (decl)
1823      tree decl;
1824 {
1825   return (DECL_SECTION_NAME (decl) == NULL_TREE
1826 	  && ! (flag_function_sections
1827 	        || (targetm.have_named_sections
1828 		    && DECL_ONE_ONLY (decl))));
1829 }
1830 
1831 /* Return true if EXP should be placed in the small data section.  */
1832 
1833 static bool
1834 alpha_in_small_data_p (exp)
1835      tree exp;
1836 {
1837   /* We want to merge strings, so we never consider them small data.  */
1838   if (TREE_CODE (exp) == STRING_CST)
1839     return false;
1840 
1841   /* Functions are never in the small data area.  Duh.  */
1842   if (TREE_CODE (exp) == FUNCTION_DECL)
1843     return false;
1844 
1845   if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
1846     {
1847       const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
1848       if (strcmp (section, ".sdata") == 0
1849 	  || strcmp (section, ".sbss") == 0)
1850 	return true;
1851     }
1852   else
1853     {
1854       HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
1855 
1856       /* If this is an incomplete type with size 0, then we can't put it
1857 	 in sdata because it might be too big when completed.  */
1858       if (size > 0 && size <= g_switch_value)
1859 	return true;
1860     }
1861 
1862   return false;
1863 }
1864 
1865 /* If we are referencing a function that is static, make the SYMBOL_REF
1866    special.  We use this to indicate that we can branch to this function
1867    without setting PV or restoring GP.
1868 
1869    If this is a variable that is known to be defined locally, add "@v"
1870    to the name.  If in addition the variable is to go in .sdata/.sbss,
1871    then add "@s" instead.  */
1872 
1873 static void
1874 alpha_encode_section_info (decl, first)
1875      tree decl;
1876      int first ATTRIBUTE_UNUSED;
1877 {
1878   const char *symbol_str;
1879   bool is_local;
1880   char encoding = 0;
1881   rtx rtl, symbol;
1882 
1883   rtl = DECL_P (decl) ? DECL_RTL (decl) : TREE_CST_RTL (decl);
1884 
1885   /* Careful not to prod global register variables.  */
1886   if (GET_CODE (rtl) != MEM)
1887     return;
1888   symbol = XEXP (rtl, 0);
1889   if (GET_CODE (symbol) != SYMBOL_REF)
1890     return;
1891 
1892   if (TREE_CODE (decl) == FUNCTION_DECL)
1893     {
1894       /* We mark public functions once they are emitted; otherwise we
1895 	 don't know that they exist in this unit of translation.  */
1896       if (TREE_PUBLIC (decl))
1897 	return;
1898 
1899       /* Do not mark functions that are not in .text; otherwise we
1900 	 don't know that they are near enough for a direct branch.  */
1901       if (! decl_in_text_section (decl))
1902 	return;
1903 
1904       SYMBOL_REF_FLAG (symbol) = 1;
1905       return;
1906     }
1907 
1908   /* Early out if we're not going to do anything with this data.  */
1909   if (! TARGET_EXPLICIT_RELOCS)
1910     return;
1911 
1912   symbol_str = XSTR (symbol, 0);
1913 
1914   /* A variable is considered "local" if it is defined in this module.  */
1915   is_local = (*targetm.binds_local_p) (decl);
1916 
1917   /* Care for TLS variables.  */
1918   if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
1919     {
1920       switch (decl_tls_model (decl))
1921 	{
1922 	case TLS_MODEL_GLOBAL_DYNAMIC:
1923 	  encoding = 'G';
1924 	  break;
1925 	case TLS_MODEL_LOCAL_DYNAMIC:
1926 	  encoding = 'D';
1927 	  break;
1928 	case TLS_MODEL_INITIAL_EXEC:
1929 	  encoding = 'I';
1930 	  break;
1931 	case TLS_MODEL_LOCAL_EXEC:
1932 	  encoding = 'T';
1933 	  break;
1934 	}
1935     }
1936   else if (is_local)
1937     {
1938       /* Determine if DECL will wind up in .sdata/.sbss.  */
1939       if (alpha_in_small_data_p (decl))
1940 	encoding = 'S';
1941       else
1942 	encoding = 'L';
1943     }
1944 
1945   /* Finally, encode this into the symbol string.  */
1946   if (encoding)
1947     {
1948       char *newstr;
1949       size_t len;
1950       char want_prefix = (is_local ? '@' : '%');
1951       char other_prefix = (is_local ? '%' : '@');
1952 
1953       if (symbol_str[0] == want_prefix)
1954 	{
1955 	  if (symbol_str[1] == encoding)
1956 	    return;
1957 	  symbol_str += 2;
1958 	}
1959       else if (symbol_str[0] == other_prefix)
1960 	symbol_str += 2;
1961 
1962       len = strlen (symbol_str) + 1;
1963       newstr = alloca (len + 2);
1964 
1965       newstr[0] = want_prefix;
1966       newstr[1] = encoding;
1967       memcpy (newstr + 2, symbol_str, len);
1968 
1969       XSTR (symbol, 0) = ggc_alloc_string (newstr, len + 2 - 1);
1970     }
1971 }
1972 
1973 /* Undo the effects of the above.  */
1974 
1975 static const char *
1976 alpha_strip_name_encoding (str)
1977      const char *str;
1978 {
1979   if (str[0] == '@' || str[0] == '%')
1980     str += 2;
1981   if (str[0] == '*')
1982     str++;
1983   return str;
1984 }
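
/* A standalone sketch of the two-character scheme above, assuming the
   same '@' (local) / '%' (global) prefixes; the fixed buffer is an
   illustration only, not how the compiler allocates symbol names.
   Guarded out of the build; compile separately to try it.  */
#if 0
#include <stdio.h>
#include <string.h>

static const char *
strip (const char *str)
{
  if (str[0] == '@' || str[0] == '%')
    str += 2;
  if (str[0] == '*')
    str++;
  return str;
}

int
main ()
{
  char buf[64];

  /* Encode a local small-data variable, as the encoder above would.  */
  snprintf (buf, sizeof buf, "%c%c%s", '@', 'S', "counter");
  printf ("%s -> %s\n", buf, strip (buf));  /* @Scounter -> counter */
  return 0;
}
#endif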
1985 
1986 #if TARGET_ABI_OPEN_VMS
1987 static bool
1988 alpha_linkage_symbol_p (symname)
1989      const char *symname;
1990 {
1991   int symlen = strlen (symname);
1992 
1993   if (symlen > 4)
1994     return strcmp (&symname [symlen - 4], "..lk") == 0;
1995 
1996   return false;
1997 }
1998 
1999 #define LINKAGE_SYMBOL_REF_P(X) \
2000   ((GET_CODE (X) == SYMBOL_REF   \
2001     && alpha_linkage_symbol_p (XSTR (X, 0))) \
2002    || (GET_CODE (X) == CONST                 \
2003        && GET_CODE (XEXP (X, 0)) == PLUS     \
2004        && GET_CODE (XEXP (XEXP (X, 0), 0)) == SYMBOL_REF \
2005        && alpha_linkage_symbol_p (XSTR (XEXP (XEXP (X, 0), 0), 0))))
2006 #endif
2007 
2008 /* legitimate_address_p recognizes an RTL expression that is a valid
2009    memory address for an instruction.  The MODE argument is the
2010    machine mode for the MEM expression that wants to use this address.
2011 
2012    For Alpha, we have either a constant address or the sum of a
2013    register and a constant address, or just a register.  For DImode,
2014    any of those forms can be surrounded with an AND that clear the
2015    any of those forms can be surrounded with an AND that clears the
2016 
2017 bool
2018 alpha_legitimate_address_p (mode, x, strict)
2019      enum machine_mode mode;
2020      rtx x;
2021      int strict;
2022 {
2023   /* If this is an ldq_u type address, discard the outer AND.  */
2024   if (mode == DImode
2025       && GET_CODE (x) == AND
2026       && GET_CODE (XEXP (x, 1)) == CONST_INT
2027       && INTVAL (XEXP (x, 1)) == -8)
2028     x = XEXP (x, 0);
2029 
2030   /* Discard non-paradoxical subregs.  */
2031   if (GET_CODE (x) == SUBREG
2032       && (GET_MODE_SIZE (GET_MODE (x))
2033 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2034     x = SUBREG_REG (x);
2035 
2036   /* Unadorned general registers are valid.  */
2037   if (REG_P (x)
2038       && (strict
2039 	  ? STRICT_REG_OK_FOR_BASE_P (x)
2040 	  : NONSTRICT_REG_OK_FOR_BASE_P (x)))
2041     return true;
2042 
2043   /* Constant addresses (i.e. +/- 32k) are valid.  */
2044   if (CONSTANT_ADDRESS_P (x))
2045     return true;
2046 
2047 #if TARGET_ABI_OPEN_VMS
2048   if (LINKAGE_SYMBOL_REF_P (x))
2049     return true;
2050 #endif
2051 
2052   /* Register plus a small constant offset is valid.  */
2053   if (GET_CODE (x) == PLUS)
2054     {
2055       rtx ofs = XEXP (x, 1);
2056       x = XEXP (x, 0);
2057 
2058       /* Discard non-paradoxical subregs.  */
2059       if (GET_CODE (x) == SUBREG
2060           && (GET_MODE_SIZE (GET_MODE (x))
2061 	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2062 	x = SUBREG_REG (x);
2063 
2064       if (REG_P (x))
2065 	{
2066 	  if (! strict
2067 	      && NONSTRICT_REG_OK_FP_BASE_P (x)
2068 	      && GET_CODE (ofs) == CONST_INT)
2069 	    return true;
2070 	  if ((strict
2071 	       ? STRICT_REG_OK_FOR_BASE_P (x)
2072 	       : NONSTRICT_REG_OK_FOR_BASE_P (x))
2073 	      && CONSTANT_ADDRESS_P (ofs))
2074 	    return true;
2075 	}
2076       else if (GET_CODE (x) == ADDRESSOF
2077 	       && GET_CODE (ofs) == CONST_INT)
2078 	return true;
2079     }
2080 
2081   /* If we're managing explicit relocations, LO_SUM is valid, as
2082      are small data symbols.  */
2083   else if (TARGET_EXPLICIT_RELOCS)
2084     {
2085       if (small_symbolic_operand (x, Pmode))
2086 	return true;
2087 
2088       if (GET_CODE (x) == LO_SUM)
2089 	{
2090 	  rtx ofs = XEXP (x, 1);
2091 	  x = XEXP (x, 0);
2092 
2093 	  /* Discard non-paradoxical subregs.  */
2094 	  if (GET_CODE (x) == SUBREG
2095 	      && (GET_MODE_SIZE (GET_MODE (x))
2096 		  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2097 	    x = SUBREG_REG (x);
2098 
2099 	  /* Must have a valid base register.  */
2100 	  if (! (REG_P (x)
2101 		 && (strict
2102 		     ? STRICT_REG_OK_FOR_BASE_P (x)
2103 		     : NONSTRICT_REG_OK_FOR_BASE_P (x))))
2104 	    return false;
2105 
2106 	  /* The symbol must be local.  */
2107 	  if (local_symbolic_operand (ofs, Pmode)
2108 	      || dtp32_symbolic_operand (ofs, Pmode)
2109 	      || tp32_symbolic_operand (ofs, Pmode))
2110 	    return true;
2111 	}
2112     }
2113 
2114   return false;
2115 }
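
/* A standalone check (not compiler code) of the ldq_u address form
   above: AND with -8 clears the low-order three bits, yielding the
   aligned base address that the unaligned access actually reads.  */
#if 0
#include <stdio.h>

int
main ()
{
  unsigned long addr = 0x1234567fUL;

  printf ("%#lx\n", addr & -8UL);  /* 0x12345678 */
  return 0;
}
#endif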
2116 
2117 /* Try machine-dependent ways of modifying an illegitimate address
2118    to be legitimate.  If we find one, return the new, valid address.  */
2119 
2120 rtx
2121 alpha_legitimize_address (x, scratch, mode)
2122      rtx x;
2123      rtx scratch;
2124      enum machine_mode mode ATTRIBUTE_UNUSED;
2125 {
2126   HOST_WIDE_INT addend;
2127 
2128   /* If the address is (plus reg const_int) and the CONST_INT is not a
2129      valid offset, compute the high part of the constant and add it to
2130      the register.  Then our address is (plus temp low-part-const).  */
2131   if (GET_CODE (x) == PLUS
2132       && GET_CODE (XEXP (x, 0)) == REG
2133       && GET_CODE (XEXP (x, 1)) == CONST_INT
2134       && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
2135     {
2136       addend = INTVAL (XEXP (x, 1));
2137       x = XEXP (x, 0);
2138       goto split_addend;
2139     }
2140 
2141   /* If the address is (const (plus FOO const_int)), find the low-order
2142      part of the CONST_INT.  Then load FOO plus any high-order part of the
2143      CONST_INT into a register.  Our address is (plus reg low-part-const).
2144      This is done to reduce the number of GOT entries.  */
2145   if (!no_new_pseudos
2146       && GET_CODE (x) == CONST
2147       && GET_CODE (XEXP (x, 0)) == PLUS
2148       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2149     {
2150       addend = INTVAL (XEXP (XEXP (x, 0), 1));
2151       x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
2152       goto split_addend;
2153     }
2154 
2155   /* If we have a (plus reg const), emit the load as in (2), then add
2156      the two registers, and finally generate (plus reg low-part-const) as
2157      our address.  */
2158   if (!no_new_pseudos
2159       && GET_CODE (x) == PLUS
2160       && GET_CODE (XEXP (x, 0)) == REG
2161       && GET_CODE (XEXP (x, 1)) == CONST
2162       && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
2163       && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)
2164     {
2165       addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
2166       x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
2167 			       XEXP (XEXP (XEXP (x, 1), 0), 0),
2168 			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
2169       goto split_addend;
2170     }
2171 
2172   /* If this is a local symbol, split the address into HIGH/LO_SUM parts.  */
2173   if (TARGET_EXPLICIT_RELOCS && symbolic_operand (x, Pmode))
2174     {
2175       rtx r0, r16, eqv, tga, tp, insn, dest, seq;
2176 
2177       switch (tls_symbolic_operand_type (x))
2178 	{
2179 	case TLS_MODEL_GLOBAL_DYNAMIC:
2180 	  start_sequence ();
2181 
2182 	  r0 = gen_rtx_REG (Pmode, 0);
2183 	  r16 = gen_rtx_REG (Pmode, 16);
2184 	  tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
2185 	  dest = gen_reg_rtx (Pmode);
2186 	  seq = GEN_INT (alpha_next_sequence_number++);
2187 
2188 	  emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq));
2189 	  insn = gen_call_value_osf_tlsgd (r0, tga, seq);
2190 	  insn = emit_call_insn (insn);
2191 	  CONST_OR_PURE_CALL_P (insn) = 1;
2192 	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);
2193 
2194           insn = get_insns ();
2195 	  end_sequence ();
2196 
2197 	  emit_libcall_block (insn, dest, r0, x);
2198 	  return dest;
2199 
2200 	case TLS_MODEL_LOCAL_DYNAMIC:
2201 	  start_sequence ();
2202 
2203 	  r0 = gen_rtx_REG (Pmode, 0);
2204 	  r16 = gen_rtx_REG (Pmode, 16);
2205 	  tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
2206 	  scratch = gen_reg_rtx (Pmode);
2207 	  seq = GEN_INT (alpha_next_sequence_number++);
2208 
2209 	  emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq));
2210 	  insn = gen_call_value_osf_tlsldm (r0, tga, seq);
2211 	  insn = emit_call_insn (insn);
2212 	  CONST_OR_PURE_CALL_P (insn) = 1;
2213 	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);
2214 
2215           insn = get_insns ();
2216 	  end_sequence ();
2217 
2218 	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2219 				UNSPEC_TLSLDM_CALL);
2220 	  emit_libcall_block (insn, scratch, r0, eqv);
2221 
2222 	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL);
2223 	  eqv = gen_rtx_CONST (Pmode, eqv);
2224 
2225 	  if (alpha_tls_size == 64)
2226 	    {
2227 	      dest = gen_reg_rtx (Pmode);
2228 	      emit_insn (gen_rtx_SET (VOIDmode, dest, eqv));
2229 	      emit_insn (gen_adddi3 (dest, dest, scratch));
2230 	      return dest;
2231 	    }
2232 	  if (alpha_tls_size == 32)
2233 	    {
2234 	      insn = gen_rtx_HIGH (Pmode, eqv);
2235 	      insn = gen_rtx_PLUS (Pmode, scratch, insn);
2236 	      scratch = gen_reg_rtx (Pmode);
2237 	      emit_insn (gen_rtx_SET (VOIDmode, scratch, insn));
2238 	    }
2239 	  return gen_rtx_LO_SUM (Pmode, scratch, eqv);
2240 
2241 	case TLS_MODEL_INITIAL_EXEC:
2242 	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
2243 	  eqv = gen_rtx_CONST (Pmode, eqv);
2244 	  tp = gen_reg_rtx (Pmode);
2245 	  scratch = gen_reg_rtx (Pmode);
2246 	  dest = gen_reg_rtx (Pmode);
2247 
2248 	  emit_insn (gen_load_tp (tp));
2249 	  emit_insn (gen_rtx_SET (VOIDmode, scratch, eqv));
2250 	  emit_insn (gen_adddi3 (dest, tp, scratch));
2251 	  return dest;
2252 
2253 	case TLS_MODEL_LOCAL_EXEC:
2254 	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
2255 	  eqv = gen_rtx_CONST (Pmode, eqv);
2256 	  tp = gen_reg_rtx (Pmode);
2257 
2258 	  emit_insn (gen_load_tp (tp));
2259 	  if (alpha_tls_size == 32)
2260 	    {
2261 	      insn = gen_rtx_HIGH (Pmode, eqv);
2262 	      insn = gen_rtx_PLUS (Pmode, tp, insn);
2263 	      tp = gen_reg_rtx (Pmode);
2264 	      emit_insn (gen_rtx_SET (VOIDmode, tp, insn));
2265 	    }
2266 	  return gen_rtx_LO_SUM (Pmode, tp, eqv);
2267 	}
2268 
2269       if (local_symbolic_operand (x, Pmode))
2270 	{
2271 	  if (small_symbolic_operand (x, Pmode))
2272 	    return x;
2273 	  else
2274 	    {
2275 	      if (!no_new_pseudos)
2276 	        scratch = gen_reg_rtx (Pmode);
2277 	      emit_insn (gen_rtx_SET (VOIDmode, scratch,
2278 				      gen_rtx_HIGH (Pmode, x)));
2279 	      return gen_rtx_LO_SUM (Pmode, scratch, x);
2280 	    }
2281 	}
2282     }
2283 
2284   return NULL;
2285 
2286  split_addend:
2287   {
2288     HOST_WIDE_INT low, high;
2289 
2290     low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
2291     addend -= low;
2292     high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
2293     addend -= high;
2294 
2295     if (addend)
2296       x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
2297 			       (no_new_pseudos ? scratch : NULL_RTX),
2298 			       1, OPTAB_LIB_WIDEN);
2299     if (high)
2300       x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
2301 			       (no_new_pseudos ? scratch : NULL_RTX),
2302 			       1, OPTAB_LIB_WIDEN);
2303 
2304     return plus_constant (x, low);
2305   }
2306 }
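
/* A standalone check of the split_addend arithmetic above, assuming a
   64-bit long: each XOR/subtract pair peels off a sign-extended 16- or
   32-bit piece, so LOW + HIGH + the remainder rebuilds ADDEND.  */
#if 0
#include <stdio.h>

int
main ()
{
  long addend = 0x12348765;
  long low, high;

  low = ((addend & 0xffff) ^ 0x8000) - 0x8000;  /* -0x789b */
  addend -= low;
  high = ((addend & 0xffffffffL) ^ 0x80000000L) - 0x80000000L;
  addend -= high;

  /* lda supplies LOW, ldah supplies HIGH >> 16; a leftover ADDEND
     would need a full add.  */
  printf ("low=%ld high=%#lx rest=%ld ok=%d\n", low, (unsigned long) high,
          addend, low + high + addend == 0x12348765);
  return 0;
}
#endif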
2307 
2308 /* For TARGET_EXPLICIT_RELOCS, we don't obfuscate a SYMBOL_REF to a
2309    small symbolic operand until after reload, at which point we need
2310    to replace (mem (symbol_ref)) with (mem (lo_sum $29 symbol_ref))
2311    so that sched2 has the proper dependency information.  */
2312 
2313 int
2314 some_small_symbolic_operand (x, mode)
2315      rtx x;
2316      enum machine_mode mode ATTRIBUTE_UNUSED;
2317 {
2318   return for_each_rtx (&x, some_small_symbolic_operand_1, NULL);
2319 }
2320 
2321 static int
2322 some_small_symbolic_operand_1 (px, data)
2323      rtx *px;
2324      void *data ATTRIBUTE_UNUSED;
2325 {
2326   rtx x = *px;
2327 
2328   /* Don't re-split.  */
2329   if (GET_CODE (x) == LO_SUM)
2330     return -1;
2331 
2332   return small_symbolic_operand (x, Pmode) != 0;
2333 }
2334 
2335 rtx
2336 split_small_symbolic_operand (x)
2337      rtx x;
2338 {
2339   x = copy_insn (x);
2340   for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
2341   return x;
2342 }
2343 
2344 static int
2345 split_small_symbolic_operand_1 (px, data)
2346      rtx *px;
2347      void *data ATTRIBUTE_UNUSED;
2348 {
2349   rtx x = *px;
2350 
2351   /* Don't re-split.  */
2352   if (GET_CODE (x) == LO_SUM)
2353     return -1;
2354 
2355   if (small_symbolic_operand (x, Pmode))
2356     {
2357       x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
2358       *px = x;
2359       return -1;
2360     }
2361 
2362   return 0;
2363 }
2364 
2365 /* Try a machine-dependent way of reloading an illegitimate address
2366    operand.  If we find one, push the reload and return the new rtx.  */
2367 
2368 rtx
2369 alpha_legitimize_reload_address (x, mode, opnum, type, ind_levels)
2370      rtx x;
2371      enum machine_mode mode ATTRIBUTE_UNUSED;
2372      int opnum;
2373      int type;
2374      int ind_levels ATTRIBUTE_UNUSED;
2375 {
2376   /* We must recognize output that we have already generated ourselves.  */
2377   if (GET_CODE (x) == PLUS
2378       && GET_CODE (XEXP (x, 0)) == PLUS
2379       && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2380       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2381       && GET_CODE (XEXP (x, 1)) == CONST_INT)
2382     {
2383       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2384 		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2385 		   opnum, type);
2386       return x;
2387     }
2388 
2389   /* We wish to handle large displacements off a base register by
2390      splitting the addend across an ldah and the mem insn.  This
2391      cuts the number of extra insns needed from 3 to 1.  */
2392   if (GET_CODE (x) == PLUS
2393       && GET_CODE (XEXP (x, 0)) == REG
2394       && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2395       && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
2396       && GET_CODE (XEXP (x, 1)) == CONST_INT)
2397     {
2398       HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2399       HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2400       HOST_WIDE_INT high
2401 	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2402 
2403       /* Check for 32-bit overflow.  */
2404       if (high + low != val)
2405 	return NULL_RTX;
2406 
2407       /* Reload the high part into a base reg; leave the low part
2408 	 in the mem directly.  */
2409       x = gen_rtx_PLUS (GET_MODE (x),
2410 			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2411 				      GEN_INT (high)),
2412 			GEN_INT (low));
2413 
2414       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2415 		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2416 		   opnum, type);
2417       return x;
2418     }
2419 
2420   return NULL_RTX;
2421 }
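
/* A standalone demonstration of the overflow check above: for
   VAL = 0x7fff8000 the sign-extended high half flips negative, so
   HIGH + LOW no longer equals VAL and the ldah split is rejected.
   Assumes a 64-bit long.  */
#if 0
#include <stdio.h>

static int
split_ok (long val)
{
  long low = ((val & 0xffff) ^ 0x8000) - 0x8000;
  long high = (((val - low) & 0xffffffffL) ^ 0x80000000L) - 0x80000000L;

  return high + low == val;
}

int
main ()
{
  printf ("%d %d\n", split_ok (0x12348765), split_ok (0x7fff8000));
  /* prints: 1 0 */
  return 0;
}
#endif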
2422 
2423 /* REF is an alignable memory location.  Place an aligned SImode
2424    reference into *PALIGNED_MEM and the number of bits to shift into
2425    *PBITNUM.  */
2427 
2428 void
2429 get_aligned_mem (ref, paligned_mem, pbitnum)
2430      rtx ref;
2431      rtx *paligned_mem, *pbitnum;
2432 {
2433   rtx base;
2434   HOST_WIDE_INT offset = 0;
2435 
2436   if (GET_CODE (ref) != MEM)
2437     abort ();
2438 
2439   if (reload_in_progress
2440       && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
2441     {
2442       base = find_replacement (&XEXP (ref, 0));
2443 
2444       if (! memory_address_p (GET_MODE (ref), base))
2445 	abort ();
2446     }
2447   else
2448     {
2449       base = XEXP (ref, 0);
2450     }
2451 
2452   if (GET_CODE (base) == PLUS)
2453     offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
2454 
2455   *paligned_mem
2456     = widen_memory_access (ref, SImode, (offset & ~3) - offset);
2457 
2458   if (WORDS_BIG_ENDIAN)
2459     *pbitnum = GEN_INT (32 - (GET_MODE_BITSIZE (GET_MODE (ref))
2460 			      + (offset & 3) * 8));
2461   else
2462     *pbitnum = GEN_INT ((offset & 3) * 8);
2463 }
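
/* A standalone illustration of the offset arithmetic above for the
   little-endian case: the byte at OFFSET lives (OFFSET & 3) * 8 bits
   into the aligned SImode word at OFFSET & ~3.  */
#if 0
#include <stdio.h>

int
main ()
{
  int offset = 7;

  printf ("word at %d, shift %d bits\n", offset & ~3, (offset & 3) * 8);
  /* word at 4, shift 24 bits */
  return 0;
}
#endif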
2464 
2465 /* Similar, but just get the address.  Handle the two reload cases.
2466    Add EXTRA_OFFSET to the address we return.  */
2467 
2468 rtx
2469 get_unaligned_address (ref, extra_offset)
2470      rtx ref;
2471      int extra_offset;
2472 {
2473   rtx base;
2474   HOST_WIDE_INT offset = 0;
2475 
2476   if (GET_CODE (ref) != MEM)
2477     abort ();
2478 
2479   if (reload_in_progress
2480       && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
2481     {
2482       base = find_replacement (&XEXP (ref, 0));
2483 
2484       if (! memory_address_p (GET_MODE (ref), base))
2485 	abort ();
2486     }
2487   else
2488     {
2489       base = XEXP (ref, 0);
2490     }
2491 
2492   if (GET_CODE (base) == PLUS)
2493     offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
2494 
2495   return plus_constant (base, offset + extra_offset);
2496 }
2497 
2498 /* On the Alpha, all (non-symbolic) constants except zero go into
2499    a floating-point register via memory.  Note that we cannot
2500    return anything that is not a subset of CLASS, and that some
2501    symbolic constants cannot be dropped to memory.  */
2502 
2503 enum reg_class
2504 alpha_preferred_reload_class (x, class)
2505      rtx x;
2506      enum reg_class class;
2507 {
2508   /* Zero is present in any register class.  */
2509   if (x == CONST0_RTX (GET_MODE (x)))
2510     return class;
2511 
2512   /* These sorts of constants we can easily drop to memory.  */
2513   if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2514     {
2515       if (class == FLOAT_REGS)
2516 	return NO_REGS;
2517       if (class == ALL_REGS)
2518 	return GENERAL_REGS;
2519       return class;
2520     }
2521 
2522   /* All other kinds of constants should not (and in the case of HIGH
2523      cannot) be dropped to memory -- instead we use a GENERAL_REGS
2524      secondary reload.  */
2525   if (CONSTANT_P (x))
2526     return (class == ALL_REGS ? GENERAL_REGS : class);
2527 
2528   return class;
2529 }
2530 
2531 /* Loading and storing HImode or QImode values to and from memory
2532    usually requires a scratch register.  The exceptions are loading
2533    QImode and HImode from an aligned address to a general register
2534    unless byte instructions are permitted.
2535 
2536    We also cannot load an unaligned address or a paradoxical SUBREG
2537    into an FP register.
2538 
2539    We also cannot do integral arithmetic into FP regs, as might result
2540    from register elimination into a DImode fp register.  */
2541 
2542 enum reg_class
2543 secondary_reload_class (class, mode, x, in)
2544      enum reg_class class;
2545      enum machine_mode mode;
2546      rtx x;
2547      int in;
2548 {
2549   if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
2550     {
2551       if (GET_CODE (x) == MEM
2552 	  || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
2553 	  || (GET_CODE (x) == SUBREG
2554 	      && (GET_CODE (SUBREG_REG (x)) == MEM
2555 		  || (GET_CODE (SUBREG_REG (x)) == REG
2556 		      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
2557 	{
2558 	  if (!in || !aligned_memory_operand(x, mode))
2559 	    return GENERAL_REGS;
2560 	}
2561     }
2562 
2563   if (class == FLOAT_REGS)
2564     {
2565       if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
2566 	return GENERAL_REGS;
2567 
2568       if (GET_CODE (x) == SUBREG
2569 	  && (GET_MODE_SIZE (GET_MODE (x))
2570 	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2571 	return GENERAL_REGS;
2572 
2573       if (in && INTEGRAL_MODE_P (mode)
2574 	  && ! (memory_operand (x, mode) || x == const0_rtx))
2575 	return GENERAL_REGS;
2576     }
2577 
2578   return NO_REGS;
2579 }
2580 
2581 /* Subfunction of the following function.  Update the flags of any MEM
2582    found in part of X.  */
2583 
2584 static void
2585 alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p)
2586      rtx x;
2587      int in_struct_p, volatile_p, unchanging_p;
2588 {
2589   int i;
2590 
2591   switch (GET_CODE (x))
2592     {
2593     case SEQUENCE:
2594       abort ();
2595 
2596     case PARALLEL:
2597       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2598 	alpha_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p,
2599 			      unchanging_p);
2600       break;
2601 
2602     case INSN:
2603       alpha_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p,
2604 			    unchanging_p);
2605       break;
2606 
2607     case SET:
2608       alpha_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p,
2609 			    unchanging_p);
2610       alpha_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p,
2611 			    unchanging_p);
2612       break;
2613 
2614     case MEM:
2615       MEM_IN_STRUCT_P (x) = in_struct_p;
2616       MEM_VOLATILE_P (x) = volatile_p;
2617       RTX_UNCHANGING_P (x) = unchanging_p;
2618       /* Sadly, we cannot use alias sets because the extra aliasing
2619 	 produced by the AND interferes.  Given that two-byte quantities
2620 	 are the only thing we would be able to differentiate anyway,
2621 	 there does not seem to be any point in convoluting the early
2622 	 out of the alias check.  */
2623       break;
2624 
2625     default:
2626       break;
2627     }
2628 }
2629 
2630 /* Given INSN, which is an INSN list or the PATTERN of a single insn
2631    generated to perform a memory operation, look for any MEMs in either
2632    a SET_DEST or a SET_SRC and copy the in-struct, unchanging, and
2633    volatile flags from REF into each of the MEMs found.  If REF is not
2634    a MEM, don't do anything.  */
2635 
2636 void
2637 alpha_set_memflags (insn, ref)
2638      rtx insn;
2639      rtx ref;
2640 {
2641   int in_struct_p, volatile_p, unchanging_p;
2642 
2643   if (GET_CODE (ref) != MEM)
2644     return;
2645 
2646   in_struct_p = MEM_IN_STRUCT_P (ref);
2647   volatile_p = MEM_VOLATILE_P (ref);
2648   unchanging_p = RTX_UNCHANGING_P (ref);
2649 
2650   /* This is only called from alpha.md, after having had something
2651      generated from one of the insn patterns.  So if everything is
2652      zero, the pattern is already up-to-date.  */
2653   if (! in_struct_p && ! volatile_p && ! unchanging_p)
2654     return;
2655 
2656   alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p);
2657 }
2658 
2659 /* Try to output insns to set TARGET equal to the constant C if it can be
2660    done in less than N insns.  Do all computations in MODE.  Returns the place
2661    where the output has been placed if it can be done and the insns have been
2662    emitted.  If it would take more than N insns, zero is returned and no
2663    insns are emitted.  */
2664 
2665 rtx
2666 alpha_emit_set_const (target, mode, c, n)
2667      rtx target;
2668      enum machine_mode mode;
2669      HOST_WIDE_INT c;
2670      int n;
2671 {
2672   rtx result = 0;
2673   rtx orig_target = target;
2674   int i;
2675 
2676   /* If we can't make any pseudos, TARGET is an SImode hard register, and we
2677      can't load this constant in one insn, do this in DImode.  */
2678   if (no_new_pseudos && mode == SImode
2679       && GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER
2680       && (result = alpha_emit_set_const_1 (target, mode, c, 1)) == 0)
2681     {
2682       target = gen_lowpart (DImode, target);
2683       mode = DImode;
2684     }
2685 
2686   /* Try 1 insn, then 2, then up to N.  */
2687   for (i = 1; i <= n; i++)
2688     {
2689       result = alpha_emit_set_const_1 (target, mode, c, i);
2690       if (result)
2691 	{
2692 	  rtx insn = get_last_insn ();
2693 	  rtx set = single_set (insn);
2694 	  if (! CONSTANT_P (SET_SRC (set)))
2695 	    set_unique_reg_note (get_last_insn (), REG_EQUAL, GEN_INT (c));
2696 	  break;
2697 	}
2698     }
2699 
2700   /* Allow for the case where we changed the mode of TARGET.  */
2701   if (result == target)
2702     result = orig_target;
2703 
2704   return result;
2705 }
2706 
2707 /* Internal routine for the above to check for N or below insns.  */
2708 
2709 static rtx
2710 alpha_emit_set_const_1 (target, mode, c, n)
2711      rtx target;
2712      enum machine_mode mode;
2713      HOST_WIDE_INT c;
2714      int n;
2715 {
2716   HOST_WIDE_INT new;
2717   int i, bits;
2718   /* Use a pseudo if highly optimizing and still generating RTL.  */
2719   rtx subtarget
2720     = (flag_expensive_optimizations && !no_new_pseudos ? 0 : target);
2721   rtx temp, insn;
2722 
2723   /* If this is a sign-extended 32-bit constant, we can do this in at most
2724      three insns, so do it if we have enough insns left.  We always have
2725      a sign-extended 32-bit constant when compiling on a narrow machine.  */
2726 
2727   if (HOST_BITS_PER_WIDE_INT != 64
2728       || c >> 31 == -1 || c >> 31 == 0)
2729     {
2730       HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
2731       HOST_WIDE_INT tmp1 = c - low;
2732       HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
2733       HOST_WIDE_INT extra = 0;
2734 
2735       /* If HIGH will be interpreted as negative but the constant is
2736 	 positive, we must adjust it to do two ldah insns.  */
2737 
2738       if ((high & 0x8000) != 0 && c >= 0)
2739 	{
2740 	  extra = 0x4000;
2741 	  tmp1 -= 0x40000000;
2742 	  high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
2743 	}
2744 
2745       if (c == low || (low == 0 && extra == 0))
2746 	{
2747 	  /* We used to use copy_to_suggested_reg (GEN_INT (c), target, mode)
2748 	     but that meant that we can't handle INT_MIN on 32-bit machines
2749 	     (like NT/Alpha), because we recurse indefinitely through
2750 	     emit_move_insn to gen_movdi.  So instead, since we know exactly
2751 	     what we want, create it explicitly.  */
2752 
2753 	  if (target == NULL)
2754 	    target = gen_reg_rtx (mode);
2755 	  emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
2756 	  return target;
2757 	}
2758       else if (n >= 2 + (extra != 0))
2759 	{
2760 	  temp = copy_to_suggested_reg (GEN_INT (high << 16), subtarget, mode);
2761 
2762 	  /* As of 2002-02-23, addsi3 is only available when not optimizing.
2763 	     This means that if we go through expand_binop, we'll try to
2764 	     generate extensions, etc, which will require new pseudos, which
2765 	     will fail during some split phases.  The SImode add patterns
2766 	     still exist, but are not named.  So build the insns by hand.  */
2767 
2768 	  if (extra != 0)
2769 	    {
2770 	      if (! subtarget)
2771 		subtarget = gen_reg_rtx (mode);
2772 	      insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16));
2773 	      insn = gen_rtx_SET (VOIDmode, subtarget, insn);
2774 	      emit_insn (insn);
2775 	      temp = subtarget;
2776 	    }
2777 
2778 	  if (target == NULL)
2779 	    target = gen_reg_rtx (mode);
2780 	  insn = gen_rtx_PLUS (mode, temp, GEN_INT (low));
2781 	  insn = gen_rtx_SET (VOIDmode, target, insn);
2782 	  emit_insn (insn);
2783 	  return target;
2784 	}
2785     }
2786 
2787   /* If we couldn't do it that way, try some other methods.  But if we have
2788      no instructions left, don't bother.  Likewise, if this is SImode and
2789      we can't make pseudos, we can't do anything since the expand_binop
2790      and expand_unop calls will widen and try to make pseudos.  */
2791 
2792   if (n == 1 || (mode == SImode && no_new_pseudos))
2793     return 0;
2794 
2795   /* Next, see if we can load a related constant and then shift and possibly
2796      negate it to get the constant we want.  Try this once each increasing
2797      numbers of insns.  */
2798 
2799   for (i = 1; i < n; i++)
2800     {
2801       /* First, see if, minus some low bits, we have an easy load of
2802 	 the high bits.  */
2803 
2804       new = ((c & 0xffff) ^ 0x8000) - 0x8000;
2805       if (new != 0
2806           && (temp = alpha_emit_set_const (subtarget, mode, c - new, i)) != 0)
2807 	return expand_binop (mode, add_optab, temp, GEN_INT (new),
2808 			     target, 0, OPTAB_WIDEN);
2809 
2810       /* Next try complementing.  */
2811       if ((temp = alpha_emit_set_const (subtarget, mode, ~ c, i)) != 0)
2812 	return expand_unop (mode, one_cmpl_optab, temp, target, 0);
2813 
2814       /* Next try to form a constant and do a left shift.  We can do this
2815 	 if some low-order bits are zero; the exact_log2 call below tells
2816 	 us that information.  The bits we are shifting out could be any
2817 	 value, but here we'll just try the 0- and sign-extended forms of
2818 	 the constant.  To try to increase the chance of having the same
2819 	 constant in more than one insn, start at the highest number of
2820 	 bits to shift, but try all possibilities in case a ZAPNOT will
2821 	 be useful.  */
2822 
2823       if ((bits = exact_log2 (c & - c)) > 0)
2824 	for (; bits > 0; bits--)
2825 	  if ((temp = (alpha_emit_set_const
2826 		       (subtarget, mode, c >> bits, i))) != 0
2827 	      || ((temp = (alpha_emit_set_const
2828 			  (subtarget, mode,
2829 			   ((unsigned HOST_WIDE_INT) c) >> bits, i)))
2830 		  != 0))
2831 	    return expand_binop (mode, ashl_optab, temp, GEN_INT (bits),
2832 				 target, 0, OPTAB_WIDEN);
2833 
2834       /* Now try high-order zero bits.  Here we try the shifted-in bits as
2835 	 all zero and all ones.  Be careful to avoid shifting outside the
2836 	 mode and to avoid shifting outside the host wide int size.  */
2837       /* On narrow hosts, don't shift a 1 into the high bit, since we'll
2838 	 confuse the recursive call and set all of the high 32 bits.  */
2839 
2840       if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
2841 		   - floor_log2 (c) - 1 - (HOST_BITS_PER_WIDE_INT < 64))) > 0)
2842 	for (; bits > 0; bits--)
2843 	  if ((temp = alpha_emit_set_const (subtarget, mode,
2844 					    c << bits, i)) != 0
2845 	      || ((temp = (alpha_emit_set_const
2846 			   (subtarget, mode,
2847 			    ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
2848 			    i)))
2849 		  != 0))
2850 	    return expand_binop (mode, lshr_optab, temp, GEN_INT (bits),
2851 				 target, 1, OPTAB_WIDEN);
2852 
2853       /* Now try high-order 1 bits.  We get that with a sign-extension.
2854 	 But one bit isn't enough here.  Be careful to avoid shifting outside
2855 	 the mode and to avoid shifting outside the host wide int size.  */
2856 
2857       if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
2858 		   - floor_log2 (~ c) - 2)) > 0)
2859 	for (; bits > 0; bits--)
2860 	  if ((temp = alpha_emit_set_const (subtarget, mode,
2861 					    c << bits, i)) != 0
2862 	      || ((temp = (alpha_emit_set_const
2863 			   (subtarget, mode,
2864 			    ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
2865 			    i)))
2866 		  != 0))
2867 	    return expand_binop (mode, ashr_optab, temp, GEN_INT (bits),
2868 				 target, 0, OPTAB_WIDEN);
2869     }
2870 
2871 #if HOST_BITS_PER_WIDE_INT == 64
2872   /* Finally, see if we can load a value into the target that is the same as the
2873      constant except that all bytes that are 0 are changed to be 0xff.  If we
2874      can, then we can do a ZAPNOT to obtain the desired constant.  */
2875 
2876   new = c;
2877   for (i = 0; i < 64; i += 8)
2878     if ((new & ((HOST_WIDE_INT) 0xff << i)) == 0)
2879       new |= (HOST_WIDE_INT) 0xff << i;
2880 
2881   /* We are only called for SImode and DImode.  If this is SImode, ensure that
2882      we are sign extended to a full word.  */
2883 
2884   if (mode == SImode)
2885     new = ((new & 0xffffffff) ^ 0x80000000) - 0x80000000;
2886 
2887   if (new != c && new != -1
2888       && (temp = alpha_emit_set_const (subtarget, mode, new, n - 1)) != 0)
2889     return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new),
2890 			 target, 0, OPTAB_WIDEN);
2891 #endif
2892 
2893   return 0;
2894 }
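
/* A standalone check of the HIGH/EXTRA/LOW decomposition above,
   assuming a 64-bit long.  For c = 0x7fff8000 HIGH alone would be
   interpreted as negative, so 0x4000 is split off into EXTRA and the
   constant is rebuilt with two ldah's plus one lda.  */
#if 0
#include <stdio.h>

int
main ()
{
  long c = 0x7fff8000;
  long low = ((c & 0xffff) ^ 0x8000) - 0x8000;  /* -0x8000 */
  long tmp1 = c - low;
  long high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
  long extra = 0;

  if ((high & 0x8000) != 0 && c >= 0)
    {
      extra = 0x4000;
      tmp1 -= 0x40000000;
      high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
    }

  printf ("%d\n", (high << 16) + (extra << 16) + low == c);  /* 1 */
  return 0;
}
#endif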
2895 
2896 /* Having failed to find a 3 insn sequence in alpha_emit_set_const,
2897    fall back to a straightforward decomposition.  We do this to avoid
2898    exponential run times encountered when looking for longer sequences
2899    with alpha_emit_set_const.  */
2900 
2901 rtx
2902 alpha_emit_set_long_const (target, c1, c2)
2903      rtx target;
2904      HOST_WIDE_INT c1, c2;
2905 {
2906   HOST_WIDE_INT d1, d2, d3, d4;
2907 
2908   /* Decompose the entire word */
2909 #if HOST_BITS_PER_WIDE_INT >= 64
2910   if (c2 != -(c1 < 0))
2911     abort ();
2912   d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2913   c1 -= d1;
2914   d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2915   c1 = (c1 - d2) >> 32;
2916   d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2917   c1 -= d3;
2918   d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2919   if (c1 != d4)
2920     abort ();
2921 #else
2922   d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2923   c1 -= d1;
2924   d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2925   if (c1 != d2)
2926     abort ();
2927   c2 += (d2 < 0);
2928   d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
2929   c2 -= d3;
2930   d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2931   if (c2 != d4)
2932     abort ();
2933 #endif
2934 
2935   /* Construct the high word */
2936   if (d4)
2937     {
2938       emit_move_insn (target, GEN_INT (d4));
2939       if (d3)
2940 	emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d3)));
2941     }
2942   else
2943     emit_move_insn (target, GEN_INT (d3));
2944 
2945   /* Shift it into place */
2946   emit_move_insn (target, gen_rtx_ASHIFT (DImode, target, GEN_INT (32)));
2947 
2948   /* Add in the low bits.  */
2949   if (d2)
2950     emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d2)));
2951   if (d1)
2952     emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));
2953 
2954   return target;
2955 }
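
/* A standalone check of the four-piece decomposition above, assuming a
   64-bit long: D1..D4 are the sign-extended chunks, and
   ((D4 + D3) << 32) + D2 + D1 reconstructs the original constant.  */
#if 0
#include <stdio.h>

int
main ()
{
  long c = 0x123456789abcdef0L, c1 = c;
  long d1, d2, d3, d4;

  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffffL) ^ 0x80000000L) - 0x80000000L;
  c1 = (c1 - d2) >> 32;
  d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d3;
  d4 = ((c1 & 0xffffffffL) ^ 0x80000000L) - 0x80000000L;

  printf ("%d\n", (((d4 + d3) << 32) + d2 + d1) == c);  /* 1 */
  return 0;
}
#endif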
2956 
2957 /* Expand a move instruction; return true if all work is done.
2958    We don't handle non-bwx subword loads here.  */
2959 
2960 bool
2961 alpha_expand_mov (mode, operands)
2962      enum machine_mode mode;
2963      rtx *operands;
2964 {
2965   /* If the output is not a register, the input must be.  */
2966   if (GET_CODE (operands[0]) == MEM
2967       && ! reg_or_0_operand (operands[1], mode))
2968     operands[1] = force_reg (mode, operands[1]);
2969 
2970   /* Allow legitimize_address to perform some simplifications.  */
2971   if (mode == Pmode && symbolic_operand (operands[1], mode))
2972     {
2973       rtx tmp;
2974 
2975       /* With RTL inlining, at -O3, rtl is generated, stored, then actually
2976 	 compiled at the end of compilation.  In the meantime, someone can
2977 	 re-encode-section-info on some symbol, changing it e.g. from global
2978 	 to local-not-small.  If this happens, we'd have emitted a plain
2979 	 load rather than a high+losum load and not recognize the insn.  */
2980       if (TARGET_EXPLICIT_RELOCS
2981 	  && rtx_equal_function_value_matters
2982 	  && global_symbolic_operand (operands[1], mode))
2983 	{
2984 	  emit_insn (gen_movdi_er_maybe_g (operands[0], operands[1]));
2985 	  return true;
2986 	}
2987 
2988       tmp = alpha_legitimize_address (operands[1], operands[0], mode);
2989       if (tmp)
2990 	{
2991 	  if (tmp == operands[0])
2992 	    return true;
2993 	  operands[1] = tmp;
2994 	  return false;
2995 	}
2996     }
2997 
2998   /* Early out for non-constants and valid constants.  */
2999   if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
3000     return false;
3001 
3002   /* Split large integers.  */
3003   if (GET_CODE (operands[1]) == CONST_INT
3004       || GET_CODE (operands[1]) == CONST_DOUBLE)
3005     {
3006       HOST_WIDE_INT i0, i1;
3007       rtx temp = NULL_RTX;
3008 
3009       if (GET_CODE (operands[1]) == CONST_INT)
3010 	{
3011 	  i0 = INTVAL (operands[1]);
3012 	  i1 = -(i0 < 0);
3013 	}
3014       else if (HOST_BITS_PER_WIDE_INT >= 64)
3015 	{
3016 	  i0 = CONST_DOUBLE_LOW (operands[1]);
3017 	  i1 = -(i0 < 0);
3018 	}
3019       else
3020 	{
3021 	  i0 = CONST_DOUBLE_LOW (operands[1]);
3022 	  i1 = CONST_DOUBLE_HIGH (operands[1]);
3023 	}
3024 
3025       if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
3026 	temp = alpha_emit_set_const (operands[0], mode, i0, 3);
3027 
3028       if (!temp && TARGET_BUILD_CONSTANTS)
3029 	temp = alpha_emit_set_long_const (operands[0], i0, i1);
3030 
3031       if (temp)
3032 	{
3033 	  if (rtx_equal_p (operands[0], temp))
3034 	    return true;
3035 	  operands[1] = temp;
3036 	  return false;
3037 	}
3038     }
3039 
3040   /* Otherwise we've nothing left but to drop the thing to memory.  */
3041   operands[1] = force_const_mem (mode, operands[1]);
3042   if (reload_in_progress)
3043     {
3044       emit_move_insn (operands[0], XEXP (operands[1], 0));
3045       operands[1] = copy_rtx (operands[1]);
3046       XEXP (operands[1], 0) = operands[0];
3047     }
3048   else
3049     operands[1] = validize_mem (operands[1]);
3050   return false;
3051 }
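
/* A standalone note on the i1 = -(i0 < 0) idiom used above: the high
   word of a sign-extended pair is all ones exactly when the low word
   is negative, so (I1, I0) is I0 sign-extended to twice the width.  */
#if 0
#include <stdio.h>

int
main ()
{
  long i0 = -5;
  long i1 = -(i0 < 0);

  printf ("%ld\n", i1);  /* -1: the sign-extension word */
  return 0;
}
#endif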
3052 
3053 /* Expand a non-bwx QImode or HImode move instruction;
3054    return true if all work is done.  */
3055 
3056 bool
3057 alpha_expand_mov_nobwx (mode, operands)
3058      enum machine_mode mode;
3059      rtx *operands;
3060 {
3061   /* If the output is not a register, the input must be.  */
3062   if (GET_CODE (operands[0]) == MEM)
3063     operands[1] = force_reg (mode, operands[1]);
3064 
3065   /* Handle four memory cases, unaligned and aligned for either the input
3066      or the output.  The only case where we can be called during reload is
3067      for aligned loads; all other cases require temporaries.  */
3068 
3069   if (GET_CODE (operands[1]) == MEM
3070       || (GET_CODE (operands[1]) == SUBREG
3071 	  && GET_CODE (SUBREG_REG (operands[1])) == MEM)
3072       || (reload_in_progress && GET_CODE (operands[1]) == REG
3073 	  && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
3074       || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
3075 	  && GET_CODE (SUBREG_REG (operands[1])) == REG
3076 	  && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
3077     {
3078       if (aligned_memory_operand (operands[1], mode))
3079 	{
3080 	  if (reload_in_progress)
3081 	    {
3082 	      emit_insn ((mode == QImode
3083 			  ? gen_reload_inqi_help
3084 			  : gen_reload_inhi_help)
3085 		         (operands[0], operands[1],
3086 			  gen_rtx_REG (SImode, REGNO (operands[0]))));
3087 	    }
3088 	  else
3089 	    {
3090 	      rtx aligned_mem, bitnum;
3091 	      rtx scratch = gen_reg_rtx (SImode);
3092 
3093 	      get_aligned_mem (operands[1], &aligned_mem, &bitnum);
3094 
3095 	      emit_insn ((mode == QImode
3096 			  ? gen_aligned_loadqi
3097 			  : gen_aligned_loadhi)
3098 			 (operands[0], aligned_mem, bitnum, scratch));
3099 	    }
3100 	}
3101       else
3102 	{
3103 	  /* Don't pass these as parameters since that makes the generated
3104 	     code depend on parameter evaluation order, which will cause
3105 	     bootstrap failures.  */
3106 
3107 	  rtx temp1 = gen_reg_rtx (DImode);
3108 	  rtx temp2 = gen_reg_rtx (DImode);
3109 	  rtx seq = ((mode == QImode
3110 		      ? gen_unaligned_loadqi
3111 		      : gen_unaligned_loadhi)
3112 		     (operands[0], get_unaligned_address (operands[1], 0),
3113 		      temp1, temp2));
3114 
3115 	  alpha_set_memflags (seq, operands[1]);
3116 	  emit_insn (seq);
3117 	}
3118       return true;
3119     }
3120 
3121   if (GET_CODE (operands[0]) == MEM
3122       || (GET_CODE (operands[0]) == SUBREG
3123 	  && GET_CODE (SUBREG_REG (operands[0])) == MEM)
3124       || (reload_in_progress && GET_CODE (operands[0]) == REG
3125 	  && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
3126       || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
3127 	  && GET_CODE (SUBREG_REG (operands[0])) == REG
3128 	  && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER))
3129     {
3130       if (aligned_memory_operand (operands[0], mode))
3131 	{
3132 	  rtx aligned_mem, bitnum;
3133 	  rtx temp1 = gen_reg_rtx (SImode);
3134 	  rtx temp2 = gen_reg_rtx (SImode);
3135 
3136 	  get_aligned_mem (operands[0], &aligned_mem, &bitnum);
3137 
3138 	  emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
3139 					temp1, temp2));
3140 	}
3141       else
3142 	{
3143 	  rtx temp1 = gen_reg_rtx (DImode);
3144 	  rtx temp2 = gen_reg_rtx (DImode);
3145 	  rtx temp3 = gen_reg_rtx (DImode);
3146 	  rtx seq = ((mode == QImode
3147 		      ? gen_unaligned_storeqi
3148 		      : gen_unaligned_storehi)
3149 		     (get_unaligned_address (operands[0], 0),
3150 		      operands[1], temp1, temp2, temp3));
3151 
3152 	  alpha_set_memflags (seq, operands[0]);
3153 	  emit_insn (seq);
3154 	}
3155       return true;
3156     }
3157 
3158   return false;
3159 }
3160 
3161 /* Generate an unsigned DImode to FP conversion.  This is the same code
3162    optabs would emit if we didn't have TFmode patterns.
3163 
3164    For SFmode, this is the only construction I've found that can pass
3165    gcc.c-torture/execute/ieee/rbug.c.  No scenario that uses DFmode
3166    intermediates will work, because you'll get intermediate rounding
3167    that ruins the end result.  Some of this could be fixed by turning
3168    on round-to-positive-infinity, but that requires diddling the fpsr,
3169    which kills performance.  I tried turning this around and converting
3170    to a negative number, so that I could turn on /m, but either I did
3171    it wrong or there's something else, because I wound up with the exact
3172    same single-bit error.  There is a branch-less form of this same code:
3173 
3174 	srl     $16,1,$1
3175 	and     $16,1,$2
3176 	cmplt   $16,0,$3
3177 	or      $1,$2,$2
3178 	cmovge  $16,$16,$2
3179 	itoft	$3,$f10
3180 	itoft	$2,$f11
3181 	cvtqs   $f11,$f11
3182 	adds    $f11,$f11,$f0
3183 	fcmoveq $f10,$f11,$f0
3184 
3185    I'm not using it because it's the same number of instructions as
3186    this branch-full form, and it has more serialized long latency
3187    instructions on the critical path.
3188 
3189    For DFmode, we can avoid rounding errors by breaking up the word
3190    into two pieces, converting them separately, and adding them back:
3191 
3192    LC0: .long 0,0x5f800000
3193 
3194 	itoft	$16,$f11
3195 	lda	$2,LC0
3196 	cmplt	$16,0,$1
3197 	cpyse	$f11,$f31,$f10
3198 	cpyse	$f31,$f11,$f11
3199 	s4addq	$1,$2,$1
3200 	lds	$f12,0($1)
3201 	cvtqt	$f10,$f10
3202 	cvtqt	$f11,$f11
3203 	addt	$f12,$f10,$f0
3204 	addt	$f0,$f11,$f0
3205 
3206    This doesn't seem to be a clear-cut win over the optabs form.
3207    It probably all depends on the distribution of numbers being
3208    converted -- in the optabs form, everything but the high-bit-set case has a
3209    much lower minimum execution time.  */
3210 
3211 void
3212 alpha_emit_floatuns (operands)
3213      rtx operands[2];
3214 {
3215   rtx neglab, donelab, i0, i1, f0, in, out;
3216   enum machine_mode mode;
3217 
3218   out = operands[0];
3219   in = force_reg (DImode, operands[1]);
3220   mode = GET_MODE (out);
3221   neglab = gen_label_rtx ();
3222   donelab = gen_label_rtx ();
3223   i0 = gen_reg_rtx (DImode);
3224   i1 = gen_reg_rtx (DImode);
3225   f0 = gen_reg_rtx (mode);
3226 
3227   emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);
3228 
3229   emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
3230   emit_jump_insn (gen_jump (donelab));
3231   emit_barrier ();
3232 
3233   emit_label (neglab);
3234 
3235   emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
3236   emit_insn (gen_anddi3 (i1, in, const1_rtx));
3237   emit_insn (gen_iordi3 (i0, i0, i1));
3238   emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
3239   emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));
3240 
3241   emit_label (donelab);
3242 }
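
/* A standalone C rendering of the branch above, assuming IEEE single
   precision and a 64-bit long: when the high bit is set, halve the
   value (folding the lost low bit back in so rounding is unaffected),
   convert, then double.  */
#if 0
#include <stdio.h>

static float
u64_to_float (unsigned long x)
{
  unsigned long half;
  float f;

  if ((long) x >= 0)
    return (float) (long) x;  /* a plain signed conversion suffices */

  half = (x >> 1) | (x & 1);
  f = (float) (long) half;
  return f + f;
}

int
main ()
{
  printf ("%a\n", u64_to_float (~0UL));  /* 0x1p+64 */
  return 0;
}
#endif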
3243 
3244 /* Generate the comparison for a conditional branch.  */
3245 
3246 rtx
3247 alpha_emit_conditional_branch (code)
3248      enum rtx_code code;
3249 {
3250   enum rtx_code cmp_code, branch_code;
3251   enum machine_mode cmp_mode, branch_mode = VOIDmode;
3252   rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
3253   rtx tem;
3254 
3255   if (alpha_compare.fp_p && GET_MODE (op0) == TFmode)
3256     {
3257       if (! TARGET_HAS_XFLOATING_LIBS)
3258 	abort ();
3259 
3260       /* X_floating library comparison functions return
3261 	   -1  unordered
3262 	    0  false
3263 	    1  true
3264 	 Convert the compare against the raw return value.  */
3265 
3266       switch (code)
3267 	{
3268 	case UNORDERED:
3269 	  cmp_code = EQ;
3270 	  code = LT;
3271 	  break;
3272 	case ORDERED:
3273 	  cmp_code = EQ;
3274 	  code = GE;
3275 	  break;
3276 	case NE:
3277 	  cmp_code = NE;
3278 	  code = NE;
3279 	  break;
3280 	default:
3281 	  cmp_code = code;
3282 	  code = GT;
3283 	  break;
3284 	}
3285 
3286       op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
3287       op1 = const0_rtx;
3288       alpha_compare.fp_p = 0;
3289     }
3290 
3291   /* The general case: fold the comparison code to the types of compares
3292      that we have, choosing the branch as necessary.  */
3293   switch (code)
3294     {
3295     case EQ:  case LE:  case LT:  case LEU:  case LTU:
3296     case UNORDERED:
3297       /* We have these compares: */
3298       cmp_code = code, branch_code = NE;
3299       break;
3300 
3301     case NE:
3302     case ORDERED:
3303       /* These must be reversed.  */
3304       cmp_code = reverse_condition (code), branch_code = EQ;
3305       break;
3306 
3307     case GE:  case GT: case GEU:  case GTU:
3308       /* For FP, we swap them, for INT, we reverse them.  */
3309       if (alpha_compare.fp_p)
3310 	{
3311 	  cmp_code = swap_condition (code);
3312 	  branch_code = NE;
3313 	  tem = op0, op0 = op1, op1 = tem;
3314 	}
3315       else
3316 	{
3317 	  cmp_code = reverse_condition (code);
3318 	  branch_code = EQ;
3319 	}
3320       break;
3321 
3322     default:
3323       abort ();
3324     }
3325 
3326   if (alpha_compare.fp_p)
3327     {
3328       cmp_mode = DFmode;
3329       if (flag_unsafe_math_optimizations)
3330 	{
3331 	  /* When we are not as concerned about non-finite values, and we
3332 	     are comparing against zero, we can branch directly.  */
3333 	  if (op1 == CONST0_RTX (DFmode))
3334 	    cmp_code = NIL, branch_code = code;
3335 	  else if (op0 == CONST0_RTX (DFmode))
3336 	    {
3337 	      /* Undo the swap we probably did just above.  */
3338 	      tem = op0, op0 = op1, op1 = tem;
3339 	      branch_code = swap_condition (cmp_code);
3340 	      cmp_code = NIL;
3341 	    }
3342 	}
3343       else
3344 	{
3345 	  /* ??? We mark the branch mode to be CCmode to prevent the
3346 	     compare and branch from being combined, since the compare
3347 	     insn follows IEEE rules that the branch does not.  */
3348 	  branch_mode = CCmode;
3349 	}
3350     }
3351   else
3352     {
3353       cmp_mode = DImode;
3354 
3355       /* The following optimizations are only for signed compares.  */
3356       if (code != LEU && code != LTU && code != GEU && code != GTU)
3357 	{
3358 	  /* Whee.  Compare and branch against 0 directly.  */
3359 	  if (op1 == const0_rtx)
3360 	    cmp_code = NIL, branch_code = code;
3361 
3362 	  /* If the constant doesn't fit into an immediate, but can
3363  	     be generated by lda/ldah, we adjust the argument and
3364  	     compare against zero, so we can use beq/bne directly.  */
3365 	  else if (GET_CODE (op1) == CONST_INT && (code == EQ || code == NE))
3366 	    {
3367 	      HOST_WIDE_INT v = INTVAL (op1), n = -v;
3368 
3369 	      if (! CONST_OK_FOR_LETTER_P (v, 'I')
3370 		  && (CONST_OK_FOR_LETTER_P (n, 'K')
3371 		      || CONST_OK_FOR_LETTER_P (n, 'L')))
3372 		{
3373 		  cmp_code = PLUS, branch_code = code;
3374 		  op1 = GEN_INT (n);
3375 		}
3376 	    }
3377 	}
3378 
3379       if (!reg_or_0_operand (op0, DImode))
3380 	op0 = force_reg (DImode, op0);
3381       if (cmp_code != PLUS && !reg_or_8bit_operand (op1, DImode))
3382 	op1 = force_reg (DImode, op1);
3383     }
3384 
3385   /* Emit an initial compare instruction, if necessary.  */
3386   tem = op0;
3387   if (cmp_code != NIL)
3388     {
3389       tem = gen_reg_rtx (cmp_mode);
3390       emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
3391     }
3392 
3393   /* Zero the operands.  */
3394   memset (&alpha_compare, 0, sizeof (alpha_compare));
3395 
3396   /* Return the branch comparison.  */
3397   return gen_rtx_fmt_ee (branch_code, branch_mode, tem, CONST0_RTX (cmp_mode));
3398 }
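
/* A standalone sketch of the constant adjustment above, assuming the
   Alpha constraint letters: 'I' is an unsigned 8-bit compare
   immediate, 'K' a signed 16-bit lda constant, 'L' an ldah constant
   (signed 32 bits with the low 16 clear).  Rewriting x == v as
   x + (-v) == 0 lets beq/bne test the sum directly.  */
#if 0
#include <stdio.h>

static int fits_I (long v) { return v >= 0 && v < 0x100; }
static int fits_K (long v) { return v >= -0x8000 && v < 0x8000; }
static int fits_L (long v)
{
  return (v & 0xffff) == 0 && v >= -0x80000000L && v < 0x80000000L;
}

int
main ()
{
  long v = 0x12340000, n = -v;  /* too big for 'I'; -v is an ldah value */

  printf ("adjust=%d\n", !fits_I (v) && (fits_K (n) || fits_L (n)));
  /* adjust=1 */
  return 0;
}
#endif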
3399 
3400 /* Certain simplifications can be done to make invalid setcc operations
3401    valid.  Return the final comparison, or NULL if we can't work.  */
3402 
3403 rtx
3404 alpha_emit_setcc (code)
3405      enum rtx_code code;
3406 {
3407   enum rtx_code cmp_code;
3408   rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
3409   int fp_p = alpha_compare.fp_p;
3410   rtx tmp;
3411 
3412   /* Zero the operands.  */
3413   memset (&alpha_compare, 0, sizeof (alpha_compare));
3414 
3415   if (fp_p && GET_MODE (op0) == TFmode)
3416     {
3417       if (! TARGET_HAS_XFLOATING_LIBS)
3418 	abort ();
3419 
3420       /* X_floating library comparison functions return
3421 	   -1  unordered
3422 	    0  false
3423 	    1  true
3424 	 Convert the compare against the raw return value.  */
3425 
3426       if (code == UNORDERED || code == ORDERED)
3427 	cmp_code = EQ;
3428       else
3429 	cmp_code = code;
3430 
3431       op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
3432       op1 = const0_rtx;
3433       fp_p = 0;
3434 
3435       if (code == UNORDERED)
3436 	code = LT;
3437       else if (code == ORDERED)
3438 	code = GE;
3439       else
3440         code = GT;
3441     }
3442 
3443   if (fp_p && !TARGET_FIX)
3444     return NULL_RTX;
3445 
3446   /* The general case: fold the comparison code to the types of compares
3447      that we have, choosing the branch as necessary.  */
3448 
3449   cmp_code = NIL;
3450   switch (code)
3451     {
3452     case EQ:  case LE:  case LT:  case LEU:  case LTU:
3453     case UNORDERED:
3454       /* We have these compares.  */
3455       if (fp_p)
3456 	cmp_code = code, code = NE;
3457       break;
3458 
3459     case NE:
3460       if (!fp_p && op1 == const0_rtx)
3461 	break;
3462       /* FALLTHRU */
3463 
3464     case ORDERED:
3465       cmp_code = reverse_condition (code);
3466       code = EQ;
3467       break;
3468 
3469     case GE:  case GT: case GEU:  case GTU:
3470       /* These normally need swapping, but for integer zero we have
3471 	 special patterns that recognize swapped operands.  */
3472       if (!fp_p && op1 == const0_rtx)
3473 	break;
3474       code = swap_condition (code);
3475       if (fp_p)
3476 	cmp_code = code, code = NE;
3477       tmp = op0, op0 = op1, op1 = tmp;
3478       break;
3479 
3480     default:
3481       abort ();
3482     }
3483 
3484   if (!fp_p)
3485     {
3486       if (!register_operand (op0, DImode))
3487 	op0 = force_reg (DImode, op0);
3488       if (!reg_or_8bit_operand (op1, DImode))
3489 	op1 = force_reg (DImode, op1);
3490     }
3491 
3492   /* Emit an initial compare instruction, if necessary.  */
3493   if (cmp_code != NIL)
3494     {
3495       enum machine_mode mode = fp_p ? DFmode : DImode;
3496 
3497       tmp = gen_reg_rtx (mode);
3498       emit_insn (gen_rtx_SET (VOIDmode, tmp,
3499 			      gen_rtx_fmt_ee (cmp_code, mode, op0, op1)));
3500 
3501       op0 = fp_p ? gen_lowpart (DImode, tmp) : tmp;
3502       op1 = const0_rtx;
3503     }
3504 
3505   /* Return the setcc comparison.  */
3506   return gen_rtx_fmt_ee (code, DImode, op0, op1);
3507 }
3508 
3509 
3510 /* Rewrite a comparison against zero CMP of the form
3511    (CODE (cc0) (const_int 0)) so it can be written validly in
3512    a conditional move (if_then_else CMP ...).
3513    If both of the operands that set cc0 are nonzero we must emit
3514    an insn to perform the compare (it can't be done within
3515    the conditional move).  */
3516 rtx
3517 alpha_emit_conditional_move (cmp, mode)
3518      rtx cmp;
3519      enum machine_mode mode;
3520 {
3521   enum rtx_code code = GET_CODE (cmp);
3522   enum rtx_code cmov_code = NE;
3523   rtx op0 = alpha_compare.op0;
3524   rtx op1 = alpha_compare.op1;
3525   int fp_p = alpha_compare.fp_p;
3526   enum machine_mode cmp_mode
3527     = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
3528   enum machine_mode cmp_op_mode = fp_p ? DFmode : DImode;
3529   enum machine_mode cmov_mode = VOIDmode;
3530   int local_fast_math = flag_unsafe_math_optimizations;
3531   rtx tem;
3532 
3533   /* Zero the operands.  */
3534   memset (&alpha_compare, 0, sizeof (alpha_compare));
3535 
3536   if (fp_p != FLOAT_MODE_P (mode))
3537     {
3538       enum rtx_code cmp_code;
3539 
3540       if (! TARGET_FIX)
3541 	return 0;
3542 
3543       /* If we have fp<->int register move instructions, do a cmov by
3544 	 performing the comparison in fp registers, and move the
3545 	 zero/nonzero value to integer registers, where we can then
3546 	 use a normal cmov, or vice-versa.  */
3547 
3548       switch (code)
3549 	{
3550 	case EQ: case LE: case LT: case LEU: case LTU:
3551 	  /* We have these compares.  */
3552 	  cmp_code = code, code = NE;
3553 	  break;
3554 
3555 	case NE:
3556 	  /* This must be reversed.  */
3557 	  cmp_code = EQ, code = EQ;
3558 	  break;
3559 
3560 	case GE: case GT: case GEU: case GTU:
3561 	  /* These normally need swapping, but for integer zero we have
3562 	     special patterns that recognize swapped operands.  */
3563 	  if (!fp_p && op1 == const0_rtx)
3564 	    cmp_code = code, code = NE;
3565 	  else
3566 	    {
3567 	      cmp_code = swap_condition (code);
3568 	      code = NE;
3569 	      tem = op0, op0 = op1, op1 = tem;
3570 	    }
3571 	  break;
3572 
3573 	default:
3574 	  abort ();
3575 	}
3576 
3577       tem = gen_reg_rtx (cmp_op_mode);
3578       emit_insn (gen_rtx_SET (VOIDmode, tem,
3579 			      gen_rtx_fmt_ee (cmp_code, cmp_op_mode,
3580 					      op0, op1)));
3581 
3582       cmp_mode = cmp_op_mode = fp_p ? DImode : DFmode;
3583       op0 = gen_lowpart (cmp_op_mode, tem);
3584       op1 = CONST0_RTX (cmp_op_mode);
3585       fp_p = !fp_p;
3586       local_fast_math = 1;
3587     }
3588 
3589   /* We may be able to use a conditional move directly.
3590      This avoids emitting spurious compares.  */
3591   if (signed_comparison_operator (cmp, VOIDmode)
3592       && (!fp_p || local_fast_math)
3593       && (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
3594     return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
3595 
3596   /* We can't put the comparison inside the conditional move;
3597      emit a compare instruction and put that inside the
3598      conditional move.  Make sure we emit only comparisons we have;
3599      swap or reverse as necessary.  */
3600 
3601   if (no_new_pseudos)
3602     return NULL_RTX;
3603 
3604   switch (code)
3605     {
3606     case EQ:  case LE:  case LT:  case LEU:  case LTU:
3607       /* We have these compares: */
3608       break;
3609 
3610     case NE:
3611       /* This must be reversed.  */
3612       code = reverse_condition (code);
3613       cmov_code = EQ;
3614       break;
3615 
3616     case GE:  case GT:  case GEU:  case GTU:
3617       /* These must be swapped.  */
3618       if (op1 != CONST0_RTX (cmp_mode))
3619 	{
3620 	  code = swap_condition (code);
3621 	  tem = op0, op0 = op1, op1 = tem;
3622 	}
3623       break;
3624 
3625     default:
3626       abort ();
3627     }
3628 
3629   if (!fp_p)
3630     {
3631       if (!reg_or_0_operand (op0, DImode))
3632 	op0 = force_reg (DImode, op0);
3633       if (!reg_or_8bit_operand (op1, DImode))
3634 	op1 = force_reg (DImode, op1);
3635     }
3636 
3637   /* ??? We mark the branch mode to be CCmode to prevent the compare
3638      and cmov from being combined, since the compare insn follows IEEE
3639      rules that the cmov does not.  */
3640   if (fp_p && !local_fast_math)
3641     cmov_mode = CCmode;
3642 
3643   tem = gen_reg_rtx (cmp_op_mode);
3644   emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1));
3645   return gen_rtx_fmt_ee (cmov_code, cmov_mode, tem, CONST0_RTX (cmp_op_mode));
3646 }
3647 
3648 /* Simplify a conditional move of two constants into a setcc with
3649    arithmetic.  This is done with a splitter since combine would
3650    just undo the work if done during code generation.  It also catches
3651    cases we wouldn't have before cse.  */
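/* For instance, (cond ? 8 : 0) becomes a setcc into a temporary
   followed by a left shift of three, and (cond ? -1 : 0) becomes a
   setcc followed by a negate.  */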
3652 
3653 int
3654 alpha_split_conditional_move (code, dest, cond, t_rtx, f_rtx)
3655      enum rtx_code code;
3656      rtx dest, cond, t_rtx, f_rtx;
3657 {
3658   HOST_WIDE_INT t, f, diff;
3659   enum machine_mode mode;
3660   rtx target, subtarget, tmp;
3661 
3662   mode = GET_MODE (dest);
3663   t = INTVAL (t_rtx);
3664   f = INTVAL (f_rtx);
3665   diff = t - f;
3666 
3667   if (((code == NE || code == EQ) && diff < 0)
3668       || (code == GE || code == GT))
3669     {
3670       code = reverse_condition (code);
3671       diff = t, t = f, f = diff;
3672       diff = t - f;
3673     }
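  /* The reversal above preserves semantics, since (code ? t : f) is
     (!code ? f : t); it makes DIFF positive for EQ/NE and turns GE/GT
     into LT/LE.  */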
3674 
3675   subtarget = target = dest;
3676   if (mode != DImode)
3677     {
3678       target = gen_lowpart (DImode, dest);
3679       if (! no_new_pseudos)
3680         subtarget = gen_reg_rtx (DImode);
3681       else
3682 	subtarget = target;
3683     }
3684   /* Below, we must be careful to use copy_rtx on target and subtarget
3685      in intermediate insns, as they may be a subreg rtx, which may not
3686      be shared.  */
3687 
3688   if (f == 0 && exact_log2 (diff) > 0
3689       /* On EV6, we've got enough shifters to make non-arithmetic shifts
3690 	 viable over a longer latency cmove.  On EV5, the E0 slot is a
3691 	 scarce resource, and on EV4 shift has the same latency as a cmove.  */
3692       && (diff <= 8 || alpha_cpu == PROCESSOR_EV6))
3693     {
3694       tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
3695       emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
3696 
3697       tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget),
3698 			    GEN_INT (exact_log2 (t)));
3699       emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
3700     }
3701   else if (f == 0 && t == -1)
3702     {
3703       tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
3704       emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
3705 
3706       emit_insn (gen_negdi2 (target, copy_rtx (subtarget)));
3707     }
3708   else if (diff == 1 || diff == 4 || diff == 8)
3709     {
3710       rtx add_op;
3711 
3712       tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
3713       emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
3714 
3715       if (diff == 1)
3716 	emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f)));
3717       else
3718 	{
3719 	  add_op = GEN_INT (f);
3720 	  if (sext_add_operand (add_op, mode))
3721 	    {
3722 	      tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget),
3723 				  GEN_INT (diff));
3724 	      tmp = gen_rtx_PLUS (DImode, tmp, add_op);
3725 	      emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
3726 	    }
3727 	  else
3728 	    return 0;
3729 	}
3730     }
3731   else
3732     return 0;
3733 
3734   return 1;
3735 }
3736 
3737 /* Look up the function X_floating library function name for the
3738    given operation.  */
3739 
3740 static const char *
3741 alpha_lookup_xfloating_lib_func (code)
3742      enum rtx_code code;
3743 {
3744   struct xfloating_op
3745     {
3746       const enum rtx_code code;
3747       const char *const func;
3748     };
3749 
3750   static const struct xfloating_op vms_xfloating_ops[] =
3751     {
3752       { PLUS,		"OTS$ADD_X" },
3753       { MINUS,		"OTS$SUB_X" },
3754       { MULT,		"OTS$MUL_X" },
3755       { DIV,		"OTS$DIV_X" },
3756       { EQ,		"OTS$EQL_X" },
3757       { NE,		"OTS$NEQ_X" },
3758       { LT,		"OTS$LSS_X" },
3759       { LE,		"OTS$LEQ_X" },
3760       { GT,		"OTS$GTR_X" },
3761       { GE,		"OTS$GEQ_X" },
3762       { FIX,		"OTS$CVTXQ" },
3763       { FLOAT,		"OTS$CVTQX" },
3764       { UNSIGNED_FLOAT,	"OTS$CVTQUX" },
3765       { FLOAT_EXTEND,	"OTS$CVT_FLOAT_T_X" },
3766       { FLOAT_TRUNCATE,	"OTS$CVT_FLOAT_X_T" },
3767     };
3768 
3769   static const struct xfloating_op osf_xfloating_ops[] =
3770     {
3771       { PLUS,		"_OtsAddX" },
3772       { MINUS,		"_OtsSubX" },
3773       { MULT,		"_OtsMulX" },
3774       { DIV,		"_OtsDivX" },
3775       { EQ,		"_OtsEqlX" },
3776       { NE,		"_OtsNeqX" },
3777       { LT,		"_OtsLssX" },
3778       { LE,		"_OtsLeqX" },
3779       { GT,		"_OtsGtrX" },
3780       { GE,		"_OtsGeqX" },
3781       { FIX,		"_OtsCvtXQ" },
3782       { FLOAT,		"_OtsCvtQX" },
3783       { UNSIGNED_FLOAT,	"_OtsCvtQUX" },
3784       { FLOAT_EXTEND,	"_OtsConvertFloatTX" },
3785       { FLOAT_TRUNCATE,	"_OtsConvertFloatXT" },
3786     };
3787 
3788   const struct xfloating_op *ops;
3789   const long n = ARRAY_SIZE (osf_xfloating_ops);
3790   long i;
3791 
3792   /* How irritating.  Nothing to key off for the table.  Hardcode
3793      knowledge of the G_floating routines.  */
3794   if (TARGET_FLOAT_VAX)
3795     {
3796       if (TARGET_ABI_OPEN_VMS)
3797 	{
3798 	  if (code == FLOAT_EXTEND)
3799 	    return "OTS$CVT_FLOAT_G_X";
3800 	  if (code == FLOAT_TRUNCATE)
3801 	    return "OTS$CVT_FLOAT_X_G";
3802 	}
3803       else
3804 	{
3805 	  if (code == FLOAT_EXTEND)
3806 	    return "_OtsConvertFloatGX";
3807 	  if (code == FLOAT_TRUNCATE)
3808 	    return "_OtsConvertFloatXG";
3809 	}
3810     }
3811 
3812   if (TARGET_ABI_OPEN_VMS)
3813     ops = vms_xfloating_ops;
3814   else
3815     ops = osf_xfloating_ops;
3816 
3817   for (i = 0; i < n; ++i)
3818     if (ops[i].code == code)
3819       return ops[i].func;
3820 
3821   abort ();
3822 }
3823 
3824 /* Most X_floating operations take the rounding mode as an argument.
3825    Compute that here.  */
3826 
3827 static int
3828 alpha_compute_xfloating_mode_arg (code, round)
3829      enum rtx_code code;
3830      enum alpha_fp_rounding_mode round;
3831 {
3832   int mode;
3833 
3834   switch (round)
3835     {
3836     case ALPHA_FPRM_NORM:
3837       mode = 2;
3838       break;
3839     case ALPHA_FPRM_MINF:
3840       mode = 1;
3841       break;
3842     case ALPHA_FPRM_CHOP:
3843       mode = 0;
3844       break;
3845     case ALPHA_FPRM_DYN:
3846       mode = 4;
3847       break;
3848     default:
3849       abort ();
3850 
3851     /* XXX For reference, round to +inf is mode = 3.  */
3852     }
3853 
3854   if (code == FLOAT_TRUNCATE && alpha_fptm == ALPHA_FPTM_N)
3855     mode |= 0x10000;
3856 
3857   return mode;
3858 }
3859 
3860 /* Emit an X_floating library function call.
3861 
3862    Note that these functions do not follow normal calling conventions:
3863    TFmode arguments are passed in two integer registers (as opposed to
3864    indirect); TFmode return values appear in R16+R17.
3865 
3866    FUNC is the function name to call.
3867    TARGET is where the output belongs.
3868    OPERANDS are the inputs.
3869    NOPERANDS is the count of inputs.
3870    EQUIV is the expression equivalent for the function.
3871 */
3872 
3873 static void
3874 alpha_emit_xfloating_libcall (func, target, operands, noperands, equiv)
3875      const char *func;
3876      rtx target;
3877      rtx operands[];
3878      int noperands;
3879      rtx equiv;
3880 {
3881   rtx usage = NULL_RTX, tmp, reg;
3882   int regno = 16, i;
3883 
3884   start_sequence ();
3885 
3886   for (i = 0; i < noperands; ++i)
3887     {
3888       switch (GET_MODE (operands[i]))
3889 	{
3890 	case TFmode:
3891 	  reg = gen_rtx_REG (TFmode, regno);
3892 	  regno += 2;
3893 	  break;
3894 
3895 	case DFmode:
3896 	  reg = gen_rtx_REG (DFmode, regno + 32);
3897 	  regno += 1;
3898 	  break;
3899 
3900 	case VOIDmode:
3901 	  if (GET_CODE (operands[i]) != CONST_INT)
3902 	    abort ();
3903 	  /* FALLTHRU */
3904 	case DImode:
3905 	  reg = gen_rtx_REG (DImode, regno);
3906 	  regno += 1;
3907 	  break;
3908 
3909 	default:
3910 	  abort ();
3911 	}
3912 
3913       emit_move_insn (reg, operands[i]);
3914       usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage);
3915     }
3916 
3917   switch (GET_MODE (target))
3918     {
3919     case TFmode:
3920       reg = gen_rtx_REG (TFmode, 16);
3921       break;
3922     case DFmode:
3923       reg = gen_rtx_REG (DFmode, 32);
3924       break;
3925     case DImode:
3926       reg = gen_rtx_REG (DImode, 0);
3927       break;
3928     default:
3929       abort ();
3930     }
3931 
3932   tmp = gen_rtx_MEM (QImode, gen_rtx_SYMBOL_REF (Pmode, (char *) func));
3933   tmp = emit_call_insn (GEN_CALL_VALUE (reg, tmp, const0_rtx,
3934 					const0_rtx, const0_rtx));
3935   CALL_INSN_FUNCTION_USAGE (tmp) = usage;
3936 
3937   tmp = get_insns ();
3938   end_sequence ();
3939 
3940   emit_libcall_block (tmp, target, reg, equiv);
3941 }
3942 
3943 /* Emit an X_floating library function call for arithmetic (+,-,*,/).  */
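/* E.g. on OSF a TFmode add becomes a call to _OtsAddX, passing the two
   source operands plus the rounding-mode word computed by
   alpha_compute_xfloating_mode_arg.  */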
3944 
3945 void
3946 alpha_emit_xfloating_arith (code, operands)
3947      enum rtx_code code;
3948      rtx operands[];
3949 {
3950   const char *func;
3951   int mode;
3952   rtx out_operands[3];
3953 
3954   func = alpha_lookup_xfloating_lib_func (code);
3955   mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
3956 
3957   out_operands[0] = operands[1];
3958   out_operands[1] = operands[2];
3959   out_operands[2] = GEN_INT (mode);
3960   alpha_emit_xfloating_libcall (func, operands[0], out_operands, 3,
3961 				gen_rtx_fmt_ee (code, TFmode, operands[1],
3962 						operands[2]));
3963 }
3964 
3965 /* Emit an X_floating library function call for a comparison.  */
3966 
3967 static rtx
3968 alpha_emit_xfloating_compare (code, op0, op1)
3969      enum rtx_code code;
3970      rtx op0, op1;
3971 {
3972   const char *func;
3973   rtx out, operands[2];
3974 
3975   func = alpha_lookup_xfloating_lib_func (code);
3976 
3977   operands[0] = op0;
3978   operands[1] = op1;
3979   out = gen_reg_rtx (DImode);
3980 
3981   /* ??? Strange mode for equiv because what's actually returned
3982      is -1,0,1, not a proper boolean value.  */
3983   alpha_emit_xfloating_libcall (func, out, operands, 2,
3984 				gen_rtx_fmt_ee (code, CCmode, op0, op1));
3985 
3986   return out;
3987 }
3988 
3989 /* Emit an X_floating library function call for a conversion.  */
3990 
3991 void
3992 alpha_emit_xfloating_cvt (code, operands)
3993      enum rtx_code code;
3994      rtx operands[];
3995 {
3996   int noperands = 1, mode;
3997   rtx out_operands[2];
3998   const char *func;
3999 
4000   func = alpha_lookup_xfloating_lib_func (code);
4001 
4002   out_operands[0] = operands[1];
4003 
4004   switch (code)
4005     {
4006     case FIX:
4007       mode = alpha_compute_xfloating_mode_arg (code, ALPHA_FPRM_CHOP);
4008       out_operands[1] = GEN_INT (mode);
4009       noperands = 2;
4010       break;
4011     case FLOAT_TRUNCATE:
4012       mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
4013       out_operands[1] = GEN_INT (mode);
4014       noperands = 2;
4015       break;
4016     default:
4017       break;
4018     }
4019 
4020   alpha_emit_xfloating_libcall (func, operands[0], out_operands, noperands,
4021 				gen_rtx_fmt_e (code, GET_MODE (operands[0]),
4022 					       operands[1]));
4023 }
4024 
4025 /* Split a TFmode OP[1] into DImode OP[2,3] and likewise for
4026    OP[0] into OP[0,1].  Naturally, output operand ordering is
4027    little-endian.  */
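/* E.g. a TFmode value in the register pair (r16,r17) yields
   OP[2] = r16 (low quadword) and OP[3] = r17 (high quadword).  */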
4028 
4029 void
4030 alpha_split_tfmode_pair (operands)
4031      rtx operands[4];
4032 {
4033   if (GET_CODE (operands[1]) == REG)
4034     {
4035       operands[3] = gen_rtx_REG (DImode, REGNO (operands[1]) + 1);
4036       operands[2] = gen_rtx_REG (DImode, REGNO (operands[1]));
4037     }
4038   else if (GET_CODE (operands[1]) == MEM)
4039     {
4040       operands[3] = adjust_address (operands[1], DImode, 8);
4041       operands[2] = adjust_address (operands[1], DImode, 0);
4042     }
4043   else if (operands[1] == CONST0_RTX (TFmode))
4044     operands[2] = operands[3] = const0_rtx;
4045   else
4046     abort ();
4047 
4048   if (GET_CODE (operands[0]) == REG)
4049     {
4050       operands[1] = gen_rtx_REG (DImode, REGNO (operands[0]) + 1);
4051       operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
4052     }
4053   else if (GET_CODE (operands[0]) == MEM)
4054     {
4055       operands[1] = adjust_address (operands[0], DImode, 8);
4056       operands[0] = adjust_address (operands[0], DImode, 0);
4057     }
4058   else
4059     abort ();
4060 }
4061 
4062 /* Implement negtf2 or abstf2.  Op0 is destination, op1 is source,
4063    op2 is a register containing the sign bit, operation is the
4064    logical operation to be performed.  */
4065 
4066 void
4067 alpha_split_tfmode_frobsign (operands, operation)
4068      rtx operands[3];
4069      rtx (*operation) PARAMS ((rtx, rtx, rtx));
4070 {
4071   rtx high_bit = operands[2];
4072   rtx scratch;
4073   int move;
4074 
4075   alpha_split_tfmode_pair (operands);
4076 
4077   /* Detect three flavors of operand overlap.  */
4078   move = 1;
4079   if (rtx_equal_p (operands[0], operands[2]))
4080     move = 0;
4081   else if (rtx_equal_p (operands[1], operands[2]))
4082     {
4083       if (rtx_equal_p (operands[0], high_bit))
4084 	move = 2;
4085       else
4086 	move = -1;
4087     }
4088 
4089   if (move < 0)
4090     emit_move_insn (operands[0], operands[2]);
4091 
4092   /* ??? If the destination overlaps both source tf and high_bit, then
4093      assume source tf is dead in its entirety and use the other half
4094      for a scratch register.  Otherwise "scratch" is just the proper
4095      destination register.  */
4096   scratch = operands[move < 2 ? 1 : 3];
4097 
4098   emit_insn ((*operation) (scratch, high_bit, operands[3]));
4099 
4100   if (move > 0)
4101     {
4102       emit_move_insn (operands[0], operands[2]);
4103       if (move > 1)
4104 	emit_move_insn (operands[1], scratch);
4105     }
4106 }
4107 
4108 /* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
4109    unaligned data:
4110 
4111            unsigned:                       signed:
4112    word:   ldq_u  r1,X(r11)                ldq_u  r1,X(r11)
4113            ldq_u  r2,X+1(r11)              ldq_u  r2,X+1(r11)
4114            lda    r3,X(r11)                lda    r3,X+2(r11)
4115            extwl  r1,r3,r1                 extql  r1,r3,r1
4116            extwh  r2,r3,r2                 extqh  r2,r3,r2
4117            or     r1,r2,r1                 or     r1,r2,r1
4118                                            sra    r1,48,r1
4119 
4120    long:   ldq_u  r1,X(r11)                ldq_u  r1,X(r11)
4121            ldq_u  r2,X+3(r11)              ldq_u  r2,X+3(r11)
4122            lda    r3,X(r11)                lda    r3,X(r11)
4123            extll  r1,r3,r1                 extll  r1,r3,r1
4124            extlh  r2,r3,r2                 extlh  r2,r3,r2
4125            or     r1,r2,r1                 addl   r1,r2,r1
4126 
4127    quad:   ldq_u  r1,X(r11)
4128            ldq_u  r2,X+7(r11)
4129            lda    r3,X(r11)
4130            extql  r1,r3,r1
4131            extqh  r2,r3,r2
4132            or     r1,r2,r1
4133 */
4134 
4135 void
4136 alpha_expand_unaligned_load (tgt, mem, size, ofs, sign)
4137      rtx tgt, mem;
4138      HOST_WIDE_INT size, ofs;
4139      int sign;
4140 {
4141   rtx meml, memh, addr, extl, exth, tmp, mema;
4142   enum machine_mode mode;
4143 
4144   meml = gen_reg_rtx (DImode);
4145   memh = gen_reg_rtx (DImode);
4146   addr = gen_reg_rtx (DImode);
4147   extl = gen_reg_rtx (DImode);
4148   exth = gen_reg_rtx (DImode);
4149 
4150   mema = XEXP (mem, 0);
4151   if (GET_CODE (mema) == LO_SUM)
4152     mema = force_reg (Pmode, mema);
4153 
4154   /* AND addresses cannot be in any alias set, since they may implicitly
4155      alias surrounding code.  Ideally we'd have some alias set that
4156      covered all types except those with alignment 8 or higher.  */
4157 
4158   tmp = change_address (mem, DImode,
4159 			gen_rtx_AND (DImode,
4160 				     plus_constant (mema, ofs),
4161 				     GEN_INT (-8)));
4162   set_mem_alias_set (tmp, 0);
4163   emit_move_insn (meml, tmp);
4164 
4165   tmp = change_address (mem, DImode,
4166 			gen_rtx_AND (DImode,
4167 				     plus_constant (mema, ofs + size - 1),
4168 				     GEN_INT (-8)));
4169   set_mem_alias_set (tmp, 0);
4170   emit_move_insn (memh, tmp);
4171 
4172   if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4))
4173     {
4174       emit_move_insn (addr, plus_constant (mema, -1));
4175 
4176       emit_insn (gen_extqh_be (extl, meml, addr));
4177       emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr));
4178 
4179       addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
4180       addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
4181 			   addr, 1, OPTAB_WIDEN);
4182     }
4183   else if (sign && size == 2)
4184     {
4185       emit_move_insn (addr, plus_constant (mema, ofs+2));
4186 
4187       emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
4188       emit_insn (gen_extqh_le (exth, memh, addr));
4189 
4190       /* We must use tgt here for the target.  Alpha-vms port fails if we use
4191 	 addr for the target, because addr is marked as a pointer and combine
4192 	 knows that pointers are always sign-extended 32-bit values.  */
4193       addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
4194       addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
4195 			   addr, 1, OPTAB_WIDEN);
4196     }
4197   else
4198     {
4199       if (WORDS_BIG_ENDIAN)
4200 	{
4201 	  emit_move_insn (addr, plus_constant (mema, ofs+size-1));
4202 	  switch ((int) size)
4203 	    {
4204 	    case 2:
4205 	      emit_insn (gen_extwh_be (extl, meml, addr));
4206 	      mode = HImode;
4207 	      break;
4208 
4209 	    case 4:
4210 	      emit_insn (gen_extlh_be (extl, meml, addr));
4211 	      mode = SImode;
4212 	      break;
4213 
4214 	    case 8:
4215 	      emit_insn (gen_extqh_be (extl, meml, addr));
4216 	      mode = DImode;
4217 	      break;
4218 
4219 	    default:
4220 	      abort ();
4221 	    }
4222 	  emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
4223 	}
4224       else
4225 	{
4226 	  emit_move_insn (addr, plus_constant (mema, ofs));
4227 	  emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
4228 	  switch ((int) size)
4229 	    {
4230 	    case 2:
4231 	      emit_insn (gen_extwh_le (exth, memh, addr));
4232 	      mode = HImode;
4233 	      break;
4234 
4235 	    case 4:
4236 	      emit_insn (gen_extlh_le (exth, memh, addr));
4237 	      mode = SImode;
4238 	      break;
4239 
4240 	    case 8:
4241 	      emit_insn (gen_extqh_le (exth, memh, addr));
4242 	      mode = DImode;
4243 	      break;
4244 
4245 	    default:
4246 	      abort ();
4247 	    }
4248 	}
4249 
4250       addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
4251 			   gen_lowpart (mode, exth), gen_lowpart (mode, tgt),
4252 			   sign, OPTAB_WIDEN);
4253     }
4254 
4255   if (addr != tgt)
4256     emit_move_insn (tgt, gen_lowpart (GET_MODE (tgt), addr));
4257 }
4258 
4259 /* Similarly, use ins and msk instructions to perform unaligned stores.  */
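/* A sketch of the little-endian word (16-bit) case, after the
   Architecture Handbook; the expander below differs in detail (for
   one, it masks the low quadword with an explicit 0xffff constant
   rather than mskwl):

           ldq_u  r1,X(r11)
           ldq_u  r2,X+1(r11)
           lda    r3,X(r11)
           inswl  r4,r3,r5
           inswh  r4,r3,r6
           mskwl  r1,r3,r1
           mskwh  r2,r3,r2
           or     r1,r5,r1
           or     r2,r6,r2
           stq_u  r2,X+1(r11)
           stq_u  r1,X(r11)
*/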
4260 
4261 void
4262 alpha_expand_unaligned_store (dst, src, size, ofs)
4263      rtx dst, src;
4264      HOST_WIDE_INT size, ofs;
4265 {
4266   rtx dstl, dsth, addr, insl, insh, meml, memh, dsta;
4267 
4268   dstl = gen_reg_rtx (DImode);
4269   dsth = gen_reg_rtx (DImode);
4270   insl = gen_reg_rtx (DImode);
4271   insh = gen_reg_rtx (DImode);
4272 
4273   dsta = XEXP (dst, 0);
4274   if (GET_CODE (dsta) == LO_SUM)
4275     dsta = force_reg (Pmode, dsta);
4276 
4277   /* AND addresses cannot be in any alias set, since they may implicitly
4278      alias surrounding code.  Ideally we'd have some alias set that
4279      covered all types except those with alignment 8 or higher.  */
4280 
4281   meml = change_address (dst, DImode,
4282 			 gen_rtx_AND (DImode,
4283 				      plus_constant (dsta, ofs),
4284 				      GEN_INT (-8)));
4285   set_mem_alias_set (meml, 0);
4286 
4287   memh = change_address (dst, DImode,
4288 			 gen_rtx_AND (DImode,
4289 				      plus_constant (dsta, ofs + size - 1),
4290 				      GEN_INT (-8)));
4291   set_mem_alias_set (memh, 0);
4292 
4293   emit_move_insn (dsth, memh);
4294   emit_move_insn (dstl, meml);
4295   if (WORDS_BIG_ENDIAN)
4296     {
4297       addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1));
4298 
4299       if (src != const0_rtx)
4300 	{
4301 	  switch ((int) size)
4302 	    {
4303 	    case 2:
4304 	      emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
4305 	      break;
4306 	    case 4:
4307 	      emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
4308 	      break;
4309 	    case 8:
4310 	      emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
4311 	      break;
4312 	    }
4313 	  emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
4314 				GEN_INT (size*8), addr));
4315 	}
4316 
4317       switch ((int) size)
4318 	{
4319 	case 2:
4320 	  emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr));
4321 	  break;
4322 	case 4:
4323 	  {
4324 	    rtx msk = immed_double_const (0xffffffff, 0, DImode);
4325 	    emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
4326 	    break;
4327 	  }
4328 	case 8:
4329 	  emit_insn (gen_mskxl_be (dsth, dsth, constm1_rtx, addr));
4330 	  break;
4331 	}
4332 
4333       emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr));
4334     }
4335   else
4336     {
4337       addr = copy_addr_to_reg (plus_constant (dsta, ofs));
4338 
4339       if (src != const0_rtx)
4340 	{
4341 	  emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
4342 				GEN_INT (size*8), addr));
4343 
4344 	  switch ((int) size)
4345 	    {
4346 	    case 2:
4347 	      emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr));
4348 	      break;
4349 	    case 4:
4350 	      emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr));
4351 	      break;
4352 	    case 8:
4353 	      emit_insn (gen_insql_le (insl, src, addr));
4354 	      break;
4355 	    }
4356 	}
4357 
4358       emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
4359 
4360       switch ((int) size)
4361 	{
4362 	case 2:
4363 	  emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
4364 	  break;
4365 	case 4:
4366 	  {
4367 	    rtx msk = immed_double_const (0xffffffff, 0, DImode);
4368 	    emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
4369 	    break;
4370 	  }
4371 	case 8:
4372 	  emit_insn (gen_mskxl_le (dstl, dstl, constm1_rtx, addr));
4373 	  break;
4374 	}
4375     }
4376 
4377   if (src != const0_rtx)
4378     {
4379       dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN);
4380       dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
4381     }
4382 
4383   if (WORDS_BIG_ENDIAN)
4384     {
4385       emit_move_insn (meml, dstl);
4386       emit_move_insn (memh, dsth);
4387     }
4388   else
4389     {
4390       /* Must store high before low for degenerate case of aligned.  */
4391       emit_move_insn (memh, dsth);
4392       emit_move_insn (meml, dstl);
4393     }
4394 }
4395 
4396 /* The block move code tries to maximize speed by separating loads and
4397    stores at the expense of register pressure: we load all of the data
4398    before we store it back out.  Two secondary effects are worth
4399    mentioning: this speeds copying both to and from aligned and
4400    unaligned buffers, and it makes the code significantly easier to write.  */
4401 
4402 #define MAX_MOVE_WORDS	8
4403 
4404 /* Load an integral number of consecutive unaligned quadwords.  */
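/* For WORDS quadwords this issues WORDS+1 overlapping ldq_u loads and
   merges each adjacent pair with extql/extqh (or their big-endian
   counterparts).  */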
4405 
4406 static void
4407 alpha_expand_unaligned_load_words (out_regs, smem, words, ofs)
4408      rtx *out_regs;
4409      rtx smem;
4410      HOST_WIDE_INT words, ofs;
4411 {
4412   rtx const im8 = GEN_INT (-8);
4413   rtx const i64 = GEN_INT (64);
4414   rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
4415   rtx sreg, areg, tmp, smema;
4416   HOST_WIDE_INT i;
4417 
4418   smema = XEXP (smem, 0);
4419   if (GET_CODE (smema) == LO_SUM)
4420     smema = force_reg (Pmode, smema);
4421 
4422   /* Generate all the tmp registers we need.  */
4423   for (i = 0; i < words; ++i)
4424     {
4425       data_regs[i] = out_regs[i];
4426       ext_tmps[i] = gen_reg_rtx (DImode);
4427     }
4428   data_regs[words] = gen_reg_rtx (DImode);
4429 
4430   if (ofs != 0)
4431     smem = adjust_address (smem, GET_MODE (smem), ofs);
4432 
4433   /* Load up all of the source data.  */
4434   for (i = 0; i < words; ++i)
4435     {
4436       tmp = change_address (smem, DImode,
4437 			    gen_rtx_AND (DImode,
4438 					 plus_constant (smema, 8*i),
4439 					 im8));
4440       set_mem_alias_set (tmp, 0);
4441       emit_move_insn (data_regs[i], tmp);
4442     }
4443 
4444   tmp = change_address (smem, DImode,
4445 			gen_rtx_AND (DImode,
4446 				     plus_constant (smema, 8*words - 1),
4447 				     im8));
4448   set_mem_alias_set (tmp, 0);
4449   emit_move_insn (data_regs[words], tmp);
4450 
4451   /* Extract the half-word fragments.  Unfortunately DEC decided to make
4452      extxh with offset zero a noop instead of zeroing the register, so
4453      we must take care of that edge condition ourselves with cmov.  */
4454 
4455   sreg = copy_addr_to_reg (smema);
4456   areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
4457 		       1, OPTAB_WIDEN);
4458   if (WORDS_BIG_ENDIAN)
4459     emit_move_insn (sreg, plus_constant (sreg, 7));
4460   for (i = 0; i < words; ++i)
4461     {
4462       if (WORDS_BIG_ENDIAN)
4463 	{
4464 	  emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg));
4465 	  emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
4466 	}
4467       else
4468 	{
4469 	  emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
4470 	  emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
4471 	}
4472       emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
4473 			      gen_rtx_IF_THEN_ELSE (DImode,
4474 						    gen_rtx_EQ (DImode, areg,
4475 								const0_rtx),
4476 						    const0_rtx, ext_tmps[i])));
4477     }
4478 
4479   /* Merge the half-words into whole words.  */
4480   for (i = 0; i < words; ++i)
4481     {
4482       out_regs[i] = expand_binop (DImode, ior_optab, data_regs[i],
4483 				  ext_tmps[i], data_regs[i], 1, OPTAB_WIDEN);
4484     }
4485 }
4486 
4487 /* Store an integral number of consecutive unaligned quadwords.  DATA_REGS
4488    may be NULL to store zeros.  */
4489 
4490 static void
4491 alpha_expand_unaligned_store_words (data_regs, dmem, words, ofs)
4492      rtx *data_regs;
4493      rtx dmem;
4494      HOST_WIDE_INT words, ofs;
4495 {
4496   rtx const im8 = GEN_INT (-8);
4497   rtx const i64 = GEN_INT (64);
4498   rtx ins_tmps[MAX_MOVE_WORDS];
4499   rtx st_tmp_1, st_tmp_2, dreg;
4500   rtx st_addr_1, st_addr_2, dmema;
4501   HOST_WIDE_INT i;
4502 
4503   dmema = XEXP (dmem, 0);
4504   if (GET_CODE (dmema) == LO_SUM)
4505     dmema = force_reg (Pmode, dmema);
4506 
4507   /* Generate all the tmp registers we need.  */
4508   if (data_regs != NULL)
4509     for (i = 0; i < words; ++i)
4510       ins_tmps[i] = gen_reg_rtx (DImode);
4511   st_tmp_1 = gen_reg_rtx (DImode);
4512   st_tmp_2 = gen_reg_rtx (DImode);
4513 
4514   if (ofs != 0)
4515     dmem = adjust_address (dmem, GET_MODE (dmem), ofs);
4516 
4517   st_addr_2 = change_address (dmem, DImode,
4518 			      gen_rtx_AND (DImode,
4519 					   plus_constant (dmema, words*8 - 1),
4520 					   im8));
4521   set_mem_alias_set (st_addr_2, 0);
4522 
4523   st_addr_1 = change_address (dmem, DImode,
4524 			      gen_rtx_AND (DImode, dmema, im8));
4525   set_mem_alias_set (st_addr_1, 0);
4526 
4527   /* Load up the destination end bits.  */
4528   emit_move_insn (st_tmp_2, st_addr_2);
4529   emit_move_insn (st_tmp_1, st_addr_1);
4530 
4531   /* Shift the input data into place.  */
4532   dreg = copy_addr_to_reg (dmema);
4533   if (WORDS_BIG_ENDIAN)
4534     emit_move_insn (dreg, plus_constant (dreg, 7));
4535   if (data_regs != NULL)
4536     {
4537       for (i = words-1; i >= 0; --i)
4538 	{
4539 	  if (WORDS_BIG_ENDIAN)
4540 	    {
4541 	      emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
4542 	      emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
4543 	    }
4544 	  else
4545 	    {
4546 	      emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
4547 	      emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
4548 	    }
4549 	}
4550       for (i = words-1; i > 0; --i)
4551 	{
4552 	  ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i],
4553 					ins_tmps[i-1], ins_tmps[i-1], 1,
4554 					OPTAB_WIDEN);
4555 	}
4556     }
4557 
4558   /* Split and merge the ends with the destination data.  */
4559   if (WORDS_BIG_ENDIAN)
4560     {
4561       emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, constm1_rtx, dreg));
4562       emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
4563     }
4564   else
4565     {
4566       emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
4567       emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, constm1_rtx, dreg));
4568     }
4569 
4570   if (data_regs != NULL)
4571     {
4572       st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1],
4573 			       st_tmp_2, 1, OPTAB_WIDEN);
4574       st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0],
4575 			       st_tmp_1, 1, OPTAB_WIDEN);
4576     }
4577 
4578   /* Store it all.  */
4579   if (WORDS_BIG_ENDIAN)
4580     emit_move_insn (st_addr_1, st_tmp_1);
4581   else
4582     emit_move_insn (st_addr_2, st_tmp_2);
4583   for (i = words-1; i > 0; --i)
4584     {
4585       rtx tmp = change_address (dmem, DImode,
4586 				gen_rtx_AND (DImode,
4587 					     plus_constant (dmema,
4588 							    WORDS_BIG_ENDIAN ? i*8-1 : i*8),
4589 					     im8));
4590       set_mem_alias_set (tmp, 0);
4591       emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
4592     }
4593   if (WORDS_BIG_ENDIAN)
4594     emit_move_insn (st_addr_2, st_tmp_2);
4595   else
4596     emit_move_insn (st_addr_1, st_tmp_1);
4597 }
4598 
4599 
4600 /* Expand string/block move operations.
4601 
4602    operands[0] is the pointer to the destination.
4603    operands[1] is the pointer to the source.
4604    operands[2] is the number of bytes to move.
4605    operands[3] is the alignment.  */
4606 
4607 int
4608 alpha_expand_block_move (operands)
4609      rtx operands[];
4610 {
4611   rtx bytes_rtx	= operands[2];
4612   rtx align_rtx = operands[3];
4613   HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
4614   HOST_WIDE_INT bytes = orig_bytes;
4615   HOST_WIDE_INT src_align = INTVAL (align_rtx) * BITS_PER_UNIT;
4616   HOST_WIDE_INT dst_align = src_align;
4617   rtx orig_src = operands[1];
4618   rtx orig_dst = operands[0];
4619   rtx data_regs[2 * MAX_MOVE_WORDS + 16];
4620   rtx tmp;
4621   unsigned int i, words, ofs, nregs = 0;
4622 
4623   if (orig_bytes <= 0)
4624     return 1;
4625   else if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
4626     return 0;
4627 
4628   /* Look for additional alignment information from recorded register info.  */
4629 
4630   tmp = XEXP (orig_src, 0);
4631   if (GET_CODE (tmp) == REG)
4632     src_align = MAX (src_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4633   else if (GET_CODE (tmp) == PLUS
4634 	   && GET_CODE (XEXP (tmp, 0)) == REG
4635 	   && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4636     {
4637       unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4638       unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4639 
4640       if (a > src_align)
4641 	{
4642           if (a >= 64 && c % 8 == 0)
4643 	    src_align = 64;
4644           else if (a >= 32 && c % 4 == 0)
4645 	    src_align = 32;
4646           else if (a >= 16 && c % 2 == 0)
4647 	    src_align = 16;
4648 	}
4649     }
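  /* E.g. a source pointer known to be 64-bit aligned plus a constant
     offset of 4 still guarantees 32-bit alignment; the destination
     alignment is refined the same way below.  */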
4650 
4651   tmp = XEXP (orig_dst, 0);
4652   if (GET_CODE (tmp) == REG)
4653     dst_align = MAX (dst_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4654   else if (GET_CODE (tmp) == PLUS
4655 	   && GET_CODE (XEXP (tmp, 0)) == REG
4656 	   && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4657     {
4658       unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4659       unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4660 
4661       if (a > dst_align)
4662 	{
4663           if (a >= 64 && c % 8 == 0)
4664 	    dst_align = 64;
4665           else if (a >= 32 && c % 4 == 0)
4666 	    dst_align = 32;
4667           else if (a >= 16 && c % 2 == 0)
4668 	    dst_align = 16;
4669 	}
4670     }
4671 
4672   /* Load the entire block into registers.  */
4673   if (GET_CODE (XEXP (orig_src, 0)) == ADDRESSOF)
4674     {
4675       enum machine_mode mode;
4676 
4677       tmp = XEXP (XEXP (orig_src, 0), 0);
4678 
4679       /* Don't use the existing register if we're reading more than
4680 	 is held in the register.  Nor if there is not a mode that
4681 	 handles the exact size.  */
4682       mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4683       if (mode != BLKmode
4684 	  && GET_MODE_SIZE (GET_MODE (tmp)) >= bytes)
4685 	{
4686 	  if (mode == TImode)
4687 	    {
4688 	      data_regs[nregs] = gen_lowpart (DImode, tmp);
4689 	      data_regs[nregs + 1] = gen_highpart (DImode, tmp);
4690 	      nregs += 2;
4691 	    }
4692 	  else
4693 	    data_regs[nregs++] = gen_lowpart (mode, tmp);
4694 
4695 	  goto src_done;
4696 	}
4697 
4698       /* No appropriate mode; fall back on memory.  */
4699       orig_src = replace_equiv_address (orig_src,
4700 					copy_addr_to_reg (XEXP (orig_src, 0)));
4701       src_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4702     }
4703 
4704   ofs = 0;
4705   if (src_align >= 64 && bytes >= 8)
4706     {
4707       words = bytes / 8;
4708 
4709       for (i = 0; i < words; ++i)
4710 	data_regs[nregs + i] = gen_reg_rtx (DImode);
4711 
4712       for (i = 0; i < words; ++i)
4713 	emit_move_insn (data_regs[nregs + i],
4714 			adjust_address (orig_src, DImode, ofs + i * 8));
4715 
4716       nregs += words;
4717       bytes -= words * 8;
4718       ofs += words * 8;
4719     }
4720 
4721   if (src_align >= 32 && bytes >= 4)
4722     {
4723       words = bytes / 4;
4724 
4725       for (i = 0; i < words; ++i)
4726 	data_regs[nregs + i] = gen_reg_rtx (SImode);
4727 
4728       for (i = 0; i < words; ++i)
4729 	emit_move_insn (data_regs[nregs + i],
4730 			adjust_address (orig_src, SImode, ofs + i * 4));
4731 
4732       nregs += words;
4733       bytes -= words * 4;
4734       ofs += words * 4;
4735     }
4736 
4737   if (bytes >= 8)
4738     {
4739       words = bytes / 8;
4740 
4741       for (i = 0; i < words+1; ++i)
4742 	data_regs[nregs + i] = gen_reg_rtx (DImode);
4743 
4744       alpha_expand_unaligned_load_words (data_regs + nregs, orig_src,
4745 					 words, ofs);
4746 
4747       nregs += words;
4748       bytes -= words * 8;
4749       ofs += words * 8;
4750     }
4751 
4752   if (! TARGET_BWX && bytes >= 4)
4753     {
4754       data_regs[nregs++] = tmp = gen_reg_rtx (SImode);
4755       alpha_expand_unaligned_load (tmp, orig_src, 4, ofs, 0);
4756       bytes -= 4;
4757       ofs += 4;
4758     }
4759 
4760   if (bytes >= 2)
4761     {
4762       if (src_align >= 16)
4763 	{
4764 	  do {
4765 	    data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
4766 	    emit_move_insn (tmp, adjust_address (orig_src, HImode, ofs));
4767 	    bytes -= 2;
4768 	    ofs += 2;
4769 	  } while (bytes >= 2);
4770 	}
4771       else if (! TARGET_BWX)
4772 	{
4773 	  data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
4774 	  alpha_expand_unaligned_load (tmp, orig_src, 2, ofs, 0);
4775 	  bytes -= 2;
4776 	  ofs += 2;
4777 	}
4778     }
4779 
4780   while (bytes > 0)
4781     {
4782       data_regs[nregs++] = tmp = gen_reg_rtx (QImode);
4783       emit_move_insn (tmp, adjust_address (orig_src, QImode, ofs));
4784       bytes -= 1;
4785       ofs += 1;
4786     }
4787 
4788  src_done:
4789 
4790   if (nregs > ARRAY_SIZE (data_regs))
4791     abort ();
4792 
4793   /* Now save it back out again.  */
4794 
4795   i = 0, ofs = 0;
4796 
4797   if (GET_CODE (XEXP (orig_dst, 0)) == ADDRESSOF)
4798     {
4799       enum machine_mode mode;
4800       tmp = XEXP (XEXP (orig_dst, 0), 0);
4801 
4802       mode = mode_for_size (orig_bytes * BITS_PER_UNIT, MODE_INT, 1);
4803       if (GET_MODE (tmp) == mode)
4804 	{
4805 	  if (nregs == 1)
4806 	    {
4807 	      emit_move_insn (tmp, data_regs[0]);
4808 	      i = 1;
4809 	      goto dst_done;
4810 	    }
4811 
4812 	  else if (nregs == 2 && mode == TImode)
4813 	    {
4814 	      /* Undo the subregging done above when copying between
4815 		 two TImode registers.  */
4816 	      if (GET_CODE (data_regs[0]) == SUBREG
4817 		  && GET_MODE (SUBREG_REG (data_regs[0])) == TImode)
4818 		emit_move_insn (tmp, SUBREG_REG (data_regs[0]));
4819 	      else
4820 		{
4821 		  rtx seq;
4822 
4823 		  start_sequence ();
4824 		  emit_move_insn (gen_lowpart (DImode, tmp), data_regs[0]);
4825 		  emit_move_insn (gen_highpart (DImode, tmp), data_regs[1]);
4826 		  seq = get_insns ();
4827 		  end_sequence ();
4828 
4829 		  emit_no_conflict_block (seq, tmp, data_regs[0],
4830 					  data_regs[1], NULL_RTX);
4831 		}
4832 
4833 	      i = 2;
4834 	      goto dst_done;
4835 	    }
4836 	}
4837 
4838       /* ??? If nregs > 1, consider reconstructing the word in regs.  */
4839       /* ??? Optimize mode < dst_mode with strict_low_part.  */
4840 
4841       /* No appropriate mode; fall back on memory.  We can speed things
4842 	 up by recognizing extra alignment information.  */
4843       orig_dst = replace_equiv_address (orig_dst,
4844 					copy_addr_to_reg (XEXP (orig_dst, 0)));
4845       dst_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4846     }
4847 
4848   /* Write out the data in whatever chunks reading the source allowed.  */
4849   if (dst_align >= 64)
4850     {
4851       while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4852 	{
4853 	  emit_move_insn (adjust_address (orig_dst, DImode, ofs),
4854 			  data_regs[i]);
4855 	  ofs += 8;
4856 	  i++;
4857 	}
4858     }
4859 
4860   if (dst_align >= 32)
4861     {
4862       /* If the source has remaining DImode regs, write them out in
4863 	 two pieces.  */
4864       while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4865 	{
4866 	  tmp = expand_binop (DImode, lshr_optab, data_regs[i], GEN_INT (32),
4867 			      NULL_RTX, 1, OPTAB_WIDEN);
4868 
4869 	  emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4870 			  gen_lowpart (SImode, data_regs[i]));
4871 	  emit_move_insn (adjust_address (orig_dst, SImode, ofs + 4),
4872 			  gen_lowpart (SImode, tmp));
4873 	  ofs += 8;
4874 	  i++;
4875 	}
4876 
4877       while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4878 	{
4879 	  emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4880 			  data_regs[i]);
4881 	  ofs += 4;
4882 	  i++;
4883 	}
4884     }
4885 
4886   if (i < nregs && GET_MODE (data_regs[i]) == DImode)
4887     {
4888       /* Write out a remaining block of words using unaligned methods.  */
4889 
4890       for (words = 1; i + words < nregs; words++)
4891 	if (GET_MODE (data_regs[i + words]) != DImode)
4892 	  break;
4893 
4894       if (words == 1)
4895 	alpha_expand_unaligned_store (orig_dst, data_regs[i], 8, ofs);
4896       else
4897         alpha_expand_unaligned_store_words (data_regs + i, orig_dst,
4898 					    words, ofs);
4899 
4900       i += words;
4901       ofs += words * 8;
4902     }
4903 
4904   /* Due to the above, this won't be aligned.  */
4905   /* ??? If we have more than one of these, consider constructing full
4906      words in registers and using alpha_expand_unaligned_store_words.  */
4907   while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4908     {
4909       alpha_expand_unaligned_store (orig_dst, data_regs[i], 4, ofs);
4910       ofs += 4;
4911       i++;
4912     }
4913 
4914   if (dst_align >= 16)
4915     while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4916       {
4917 	emit_move_insn (adjust_address (orig_dst, HImode, ofs), data_regs[i]);
4918 	i++;
4919 	ofs += 2;
4920       }
4921   else
4922     while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4923       {
4924 	alpha_expand_unaligned_store (orig_dst, data_regs[i], 2, ofs);
4925 	i++;
4926 	ofs += 2;
4927       }
4928 
4929   while (i < nregs && GET_MODE (data_regs[i]) == QImode)
4930     {
4931       emit_move_insn (adjust_address (orig_dst, QImode, ofs), data_regs[i]);
4932       i++;
4933       ofs += 1;
4934     }
4935 
4936  dst_done:
4937 
4938   if (i != nregs)
4939     abort ();
4940 
4941   return 1;
4942 }
4943 
4944 int
4945 alpha_expand_block_clear (operands)
4946      rtx operands[];
4947 {
4948   rtx bytes_rtx	= operands[1];
4949   rtx align_rtx = operands[2];
4950   HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
4951   HOST_WIDE_INT bytes = orig_bytes;
4952   HOST_WIDE_INT align = INTVAL (align_rtx) * BITS_PER_UNIT;
4953   HOST_WIDE_INT alignofs = 0;
4954   rtx orig_dst = operands[0];
4955   rtx tmp;
4956   int i, words, ofs = 0;
4957 
4958   if (orig_bytes <= 0)
4959     return 1;
4960   if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
4961     return 0;
4962 
4963   /* Look for stricter alignment.  */
4964   tmp = XEXP (orig_dst, 0);
4965   if (GET_CODE (tmp) == REG)
4966     align = MAX (align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4967   else if (GET_CODE (tmp) == PLUS
4968 	   && GET_CODE (XEXP (tmp, 0)) == REG
4969 	   && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4970     {
4971       HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4972       int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4973 
4974       if (a > align)
4975 	{
4976           if (a >= 64)
4977 	    align = a, alignofs = 8 - c % 8;
4978           else if (a >= 32)
4979 	    align = a, alignofs = 4 - c % 4;
4980           else if (a >= 16)
4981 	    align = a, alignofs = 2 - c % 2;
4982 	}
4983     }
4984   else if (GET_CODE (tmp) == ADDRESSOF)
4985     {
4986       enum machine_mode mode;
4987 
4988       mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4989       if (GET_MODE (XEXP (tmp, 0)) == mode)
4990 	{
4991 	  emit_move_insn (XEXP (tmp, 0), const0_rtx);
4992 	  return 1;
4993 	}
4994 
4995       /* No appropriate mode; fall back on memory.  */
4996       orig_dst = replace_equiv_address (orig_dst, copy_addr_to_reg (tmp));
4997       align = GET_MODE_BITSIZE (GET_MODE (XEXP (tmp, 0)));
4998     }
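  /* ALIGNOFS now counts the bytes from the start of the block up to
     the next naturally aligned boundary; e.g. a 64-bit aligned base
     plus a constant offset of 3 leaves a 5-byte unaligned prefix.  */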
4999 
5000   /* Handle an unaligned prefix first.  */
5001 
5002   if (alignofs > 0)
5003     {
5004 #if HOST_BITS_PER_WIDE_INT >= 64
5005       /* Given that alignofs is bounded by align, the only time BWX could
5006 	 generate three stores is for a 7 byte fill.  Prefer two individual
5007 	 stores over a load/mask/store sequence.  */
5008       if ((!TARGET_BWX || alignofs == 7)
5009 	  && align >= 32
5010 	  && !(alignofs == 4 && bytes >= 4))
5011 	{
5012 	  enum machine_mode mode = (align >= 64 ? DImode : SImode);
5013 	  int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
5014 	  rtx mem, tmp;
5015 	  HOST_WIDE_INT mask;
5016 
5017 	  mem = adjust_address (orig_dst, mode, ofs - inv_alignofs);
5018 	  set_mem_alias_set (mem, 0);
5019 
5020 	  mask = ~(~(HOST_WIDE_INT)0 << (inv_alignofs * 8));
5021 	  if (bytes < alignofs)
5022 	    {
5023 	      mask |= ~(HOST_WIDE_INT)0 << ((inv_alignofs + bytes) * 8);
5024 	      ofs += bytes;
5025 	      bytes = 0;
5026 	    }
5027 	  else
5028 	    {
5029 	      bytes -= alignofs;
5030 	      ofs += alignofs;
5031 	    }
5032 	  alignofs = 0;
5033 
5034 	  tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask),
5035 			      NULL_RTX, 1, OPTAB_WIDEN);
5036 
5037 	  emit_move_insn (mem, tmp);
5038 	}
5039 #endif
5040 
5041       if (TARGET_BWX && (alignofs & 1) && bytes >= 1)
5042 	{
5043 	  emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
5044 	  bytes -= 1;
5045 	  ofs += 1;
5046 	  alignofs -= 1;
5047 	}
5048       if (TARGET_BWX && align >= 16 && (alignofs & 3) == 2 && bytes >= 2)
5049 	{
5050 	  emit_move_insn (adjust_address (orig_dst, HImode, ofs), const0_rtx);
5051 	  bytes -= 2;
5052 	  ofs += 2;
5053 	  alignofs -= 2;
5054 	}
5055       if (alignofs == 4 && bytes >= 4)
5056 	{
5057 	  emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
5058 	  bytes -= 4;
5059 	  ofs += 4;
5060 	  alignofs = 0;
5061 	}
5062 
5063       /* If we've not used the extra lead alignment information by now,
5064 	 we won't be able to.  Downgrade align to match what's left over.  */
5065       if (alignofs > 0)
5066 	{
5067 	  alignofs = alignofs & -alignofs;
5068 	  align = MIN (align, alignofs * BITS_PER_UNIT);
5069 	}
5070     }
5071 
5072   /* Handle a block of contiguous long-words.  */
5073 
5074   if (align >= 64 && bytes >= 8)
5075     {
5076       words = bytes / 8;
5077 
5078       for (i = 0; i < words; ++i)
5079 	emit_move_insn (adjust_address (orig_dst, DImode, ofs + i * 8),
5080 			const0_rtx);
5081 
5082       bytes -= words * 8;
5083       ofs += words * 8;
5084     }
5085 
5086   /* If the block is large and appropriately aligned, emit a single
5087      store followed by a sequence of stq_u insns.  */
5088 
5089   if (align >= 32 && bytes > 16)
5090     {
5091       rtx orig_dsta;
5092 
5093       emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
5094       bytes -= 4;
5095       ofs += 4;
5096 
5097       orig_dsta = XEXP (orig_dst, 0);
5098       if (GET_CODE (orig_dsta) == LO_SUM)
5099 	orig_dsta = force_reg (Pmode, orig_dsta);
5100 
5101       words = bytes / 8;
5102       for (i = 0; i < words; ++i)
5103 	{
5104 	  rtx mem
5105 	    = change_address (orig_dst, DImode,
5106 			      gen_rtx_AND (DImode,
5107 					   plus_constant (orig_dsta, ofs + i*8),
5108 					   GEN_INT (-8)));
5109 	  set_mem_alias_set (mem, 0);
5110 	  emit_move_insn (mem, const0_rtx);
5111 	}
5112 
5113       /* Depending on the alignment, the first stq_u may have overlapped
5114 	 with the initial stl, which means that the last stq_u didn't
5115 	 write as much as it would appear.  Leave those questionable bytes
5116 	 unaccounted for.  */
5117       bytes -= words * 8 - 4;
5118       ofs += words * 8 - 4;
5119     }
5120 
5121   /* Handle a smaller block of aligned words.  */
5122 
5123   if ((align >= 64 && bytes == 4)
5124       || (align == 32 && bytes >= 4))
5125     {
5126       words = bytes / 4;
5127 
5128       for (i = 0; i < words; ++i)
5129 	emit_move_insn (adjust_address (orig_dst, SImode, ofs + i * 4),
5130 			const0_rtx);
5131 
5132       bytes -= words * 4;
5133       ofs += words * 4;
5134     }
5135 
5136   /* An unaligned block uses stq_u stores for as many as possible.  */
5137 
5138   if (bytes >= 8)
5139     {
5140       words = bytes / 8;
5141 
5142       alpha_expand_unaligned_store_words (NULL, orig_dst, words, ofs);
5143 
5144       bytes -= words * 8;
5145       ofs += words * 8;
5146     }
5147 
5148   /* Next clean up any trailing pieces.  */
5149 
5150 #if HOST_BITS_PER_WIDE_INT >= 64
5151   /* Count the number of bits in BYTES for which aligned stores could
5152      be emitted.  */
5153   words = 0;
5154   for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
5155     if (bytes & i)
5156       words += 1;
5157 
5158   /* If we have appropriate alignment (and it wouldn't take too many
5159      instructions otherwise), mask out the bytes we need.  */
5160   if (TARGET_BWX ? words > 2 : bytes > 0)
5161     {
5162       if (align >= 64)
5163 	{
5164 	  rtx mem, tmp;
5165 	  HOST_WIDE_INT mask;
5166 
5167 	  mem = adjust_address (orig_dst, DImode, ofs);
5168 	  set_mem_alias_set (mem, 0);
5169 
5170 	  mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
5171 
5172 	  tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
5173 			      NULL_RTX, 1, OPTAB_WIDEN);
5174 
5175 	  emit_move_insn (mem, tmp);
5176 	  return 1;
5177 	}
5178       else if (align >= 32 && bytes < 4)
5179 	{
5180 	  rtx mem, tmp;
5181 	  HOST_WIDE_INT mask;
5182 
5183 	  mem = adjust_address (orig_dst, SImode, ofs);
5184 	  set_mem_alias_set (mem, 0);
5185 
5186 	  mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
5187 
5188 	  tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
5189 			      NULL_RTX, 1, OPTAB_WIDEN);
5190 
5191 	  emit_move_insn (mem, tmp);
5192 	  return 1;
5193 	}
5194     }
5195 #endif
5196 
5197   if (!TARGET_BWX && bytes >= 4)
5198     {
5199       alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
5200       bytes -= 4;
5201       ofs += 4;
5202     }
5203 
5204   if (bytes >= 2)
5205     {
5206       if (align >= 16)
5207 	{
5208 	  do {
5209 	    emit_move_insn (adjust_address (orig_dst, HImode, ofs),
5210 			    const0_rtx);
5211 	    bytes -= 2;
5212 	    ofs += 2;
5213 	  } while (bytes >= 2);
5214 	}
5215       else if (! TARGET_BWX)
5216 	{
5217 	  alpha_expand_unaligned_store (orig_dst, const0_rtx, 2, ofs);
5218 	  bytes -= 2;
5219 	  ofs += 2;
5220 	}
5221     }
5222 
5223   while (bytes > 0)
5224     {
5225       emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
5226       bytes -= 1;
5227       ofs += 1;
5228     }
5229 
5230   return 1;
5231 }
5232 
5233 /* Returns a mask so that zap(x, value) == x & mask.  */
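/* E.g. VALUE == 0x0f zaps the low four bytes, so the mask returned is
   0xffffffff00000000.  */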
5234 
5235 rtx
5236 alpha_expand_zap_mask (value)
5237      HOST_WIDE_INT value;
5238 {
5239   rtx result;
5240   int i;
5241 
5242   if (HOST_BITS_PER_WIDE_INT >= 64)
5243     {
5244       HOST_WIDE_INT mask = 0;
5245 
5246       for (i = 7; i >= 0; --i)
5247 	{
5248 	  mask <<= 8;
5249 	  if (!((value >> i) & 1))
5250 	    mask |= 0xff;
5251 	}
5252 
5253       result = gen_int_mode (mask, DImode);
5254     }
5255   else if (HOST_BITS_PER_WIDE_INT == 32)
5256     {
5257       HOST_WIDE_INT mask_lo = 0, mask_hi = 0;
5258 
5259       for (i = 7; i >= 4; --i)
5260 	{
5261 	  mask_hi <<= 8;
5262 	  if (!((value >> i) & 1))
5263 	    mask_hi |= 0xff;
5264 	}
5265 
5266       for (i = 3; i >= 0; --i)
5267 	{
5268 	  mask_lo <<= 8;
5269 	  if (!((value >> i) & 1))
5270 	    mask_lo |= 0xff;
5271 	}
5272 
5273       result = immed_double_const (mask_lo, mask_hi, DImode);
5274     }
5275   else
5276     abort ();
5277 
5278   return result;
5279 }
5280 
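/* Expand a builtin vector binary operation: run the insn generator GEN
   on OP0, OP1 and OP2 viewed in vector mode MODE, mapping const0_rtx
   inputs to the corresponding vector zero.  */
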
5281 void
5282 alpha_expand_builtin_vector_binop (gen, mode, op0, op1, op2)
5283      rtx (*gen) PARAMS ((rtx, rtx, rtx));
5284      enum machine_mode mode;
5285      rtx op0, op1, op2;
5286 {
5287   op0 = gen_lowpart (mode, op0);
5288 
5289   if (op1 == const0_rtx)
5290     op1 = CONST0_RTX (mode);
5291   else
5292     op1 = gen_lowpart (mode, op1);
5293 
5294   if (op2 == const0_rtx)
5295     op2 = CONST0_RTX (mode);
5296   else
5297     op2 = gen_lowpart (mode, op2);
5298 
5299   emit_insn ((*gen) (op0, op1, op2));
5300 }
5301 
5302 /* Adjust the cost of a scheduling dependency.  Return the new cost of
5303    a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
5304 
5305 static int
5306 alpha_adjust_cost (insn, link, dep_insn, cost)
5307      rtx insn;
5308      rtx link;
5309      rtx dep_insn;
5310      int cost;
5311 {
5312   enum attr_type insn_type, dep_insn_type;
5313 
5314   /* If the dependence is an anti-dependence, there is no cost.  For an
5315      output dependence, there is sometimes a cost, but it doesn't seem
5316      worth handling those few cases.  */
5317   if (REG_NOTE_KIND (link) != 0)
5318     return cost;
5319 
5320   /* If we can't recognize the insns, we can't really do anything.  */
5321   if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
5322     return cost;
5323 
5324   insn_type = get_attr_type (insn);
5325   dep_insn_type = get_attr_type (dep_insn);
5326 
5327   /* Bring in the user-defined memory latency.  */
5328   if (dep_insn_type == TYPE_ILD
5329       || dep_insn_type == TYPE_FLD
5330       || dep_insn_type == TYPE_LDSYM)
5331     cost += alpha_memory_latency - 1;
5332 
5333   /* Everything else handled in DFA bypasses now.  */
5334 
5335   return cost;
5336 }
5337 
5338 /* The number of instructions that can be issued per cycle.  */
5339 
5340 static int
5341 alpha_issue_rate ()
5342 {
5343   return (alpha_cpu == PROCESSOR_EV4 ? 2 : 4);
5344 }
5345 
5346 static int
5347 alpha_use_dfa_pipeline_interface ()
5348 {
5349   return true;
5350 }
5351 
5352 /* How many alternative schedules to try.  This should be as wide as the
5353    scheduling freedom in the DFA, but no wider.  Making this value too
5354    large results in extra work for the scheduler.
5355 
5356    For EV4, loads can be issued to either IB0 or IB1, thus we have 2
5357    alternative schedules.  For EV5, we can choose between E0/E1 and
5358    FA/FM.  For EV6, an arithmetic insn can be issued to U0/U1/L0/L1.  */
5359 
5360 static int
5361 alpha_multipass_dfa_lookahead ()
5362 {
5363   return (alpha_cpu == PROCESSOR_EV6 ? 4 : 2);
5364 }
5365 
5366 /* Machine-specific function data.  */
5367 
5368 struct machine_function GTY(())
5369 {
5370   /* For unicosmk. */
5371   /* List of call information words for calls from this function.  */
5372   struct rtx_def *first_ciw;
5373   struct rtx_def *last_ciw;
5374   int ciw_count;
5375 
5376   /* List of deferred case vectors.  */
5377   struct rtx_def *addr_list;
5378 
5379   /* For OSF. */
5380   const char *some_ld_name;
5381 };
5382 
5383 /* How to allocate a 'struct machine_function'.  */
5384 
5385 static struct machine_function *
5386 alpha_init_machine_status ()
5387 {
5388   return ((struct machine_function *)
5389 		ggc_alloc_cleared (sizeof (struct machine_function)));
5390 }
5391 
5392 /* Functions to save and restore alpha_return_addr_rtx.  */
5393 
5394 /* Start the ball rolling with RETURN_ADDR_RTX.  */
5395 
5396 rtx
5397 alpha_return_addr (count, frame)
5398      int count;
5399      rtx frame ATTRIBUTE_UNUSED;
5400 {
5401   if (count != 0)
5402     return const0_rtx;
5403 
5404   return get_hard_reg_initial_val (Pmode, REG_RA);
5405 }
5406 
5407 /* Return or create a pseudo containing the gp value for the current
5408    function.  Needed only if TARGET_LD_BUGGY_LDGP.  */
5409 
5410 rtx
5411 alpha_gp_save_rtx ()
5412 {
5413   rtx r = get_hard_reg_initial_val (DImode, 29);
5414   if (GET_CODE (r) != MEM)
5415     r = gen_mem_addressof (r, NULL_TREE, /*rescan=*/true);
5416   return r;
5417 }
5418 
5419 static int
5420 alpha_ra_ever_killed ()
5421 {
5422   rtx top;
5423 
5424   if (!has_hard_reg_initial_val (Pmode, REG_RA))
5425     return regs_ever_live[REG_RA];
5426 
5427   push_topmost_sequence ();
5428   top = get_insns ();
5429   pop_topmost_sequence ();
5430 
5431   return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX);
5432 }
5433 
5434 
5435 /* Return the trap mode suffix applicable to the current
5436    instruction, or NULL.  */
5437 
5438 static const char *
5439 get_trap_mode_suffix ()
5440 {
5441   enum attr_trap_suffix s = get_attr_trap_suffix (current_output_insn);
5442 
5443   switch (s)
5444     {
5445     case TRAP_SUFFIX_NONE:
5446       return NULL;
5447 
5448     case TRAP_SUFFIX_SU:
5449       if (alpha_fptm >= ALPHA_FPTM_SU)
5450 	return "su";
5451       return NULL;
5452 
5453     case TRAP_SUFFIX_SUI:
5454       if (alpha_fptm >= ALPHA_FPTM_SUI)
5455 	return "sui";
5456       return NULL;
5457 
5458     case TRAP_SUFFIX_V_SV:
5459       switch (alpha_fptm)
5460 	{
5461 	case ALPHA_FPTM_N:
5462 	  return NULL;
5463 	case ALPHA_FPTM_U:
5464 	  return "v";
5465 	case ALPHA_FPTM_SU:
5466 	case ALPHA_FPTM_SUI:
5467 	  return "sv";
5468 	}
5469       break;
5470 
5471     case TRAP_SUFFIX_V_SV_SVI:
5472       switch (alpha_fptm)
5473 	{
5474 	case ALPHA_FPTM_N:
5475 	  return NULL;
5476 	case ALPHA_FPTM_U:
5477 	  return "v";
5478 	case ALPHA_FPTM_SU:
5479 	  return "sv";
5480 	case ALPHA_FPTM_SUI:
5481 	  return "svi";
5482 	}
5483       break;
5484 
5485     case TRAP_SUFFIX_U_SU_SUI:
5486       switch (alpha_fptm)
5487 	{
5488 	case ALPHA_FPTM_N:
5489 	  return NULL;
5490 	case ALPHA_FPTM_U:
5491 	  return "u";
5492 	case ALPHA_FPTM_SU:
5493 	  return "su";
5494 	case ALPHA_FPTM_SUI:
5495 	  return "sui";
5496 	}
5497       break;
5498     }
5499   abort ();
5500 }
5501 
5502 /* Return the rounding mode suffix applicable to the current
5503    instruction, or NULL.  */
5504 
5505 static const char *
5506 get_round_mode_suffix ()
5507 {
5508   enum attr_round_suffix s = get_attr_round_suffix (current_output_insn);
5509 
5510   switch (s)
5511     {
5512     case ROUND_SUFFIX_NONE:
5513       return NULL;
5514     case ROUND_SUFFIX_NORMAL:
5515       switch (alpha_fprm)
5516 	{
5517 	case ALPHA_FPRM_NORM:
5518 	  return NULL;
5519 	case ALPHA_FPRM_MINF:
5520 	  return "m";
5521 	case ALPHA_FPRM_CHOP:
5522 	  return "c";
5523 	case ALPHA_FPRM_DYN:
5524 	  return "d";
5525 	}
5526       break;
5527 
5528     case ROUND_SUFFIX_C:
5529       return "c";
5530     }
5531   abort ();
5532 }
5533 
5534 /* Locate some local-dynamic symbol still in use by this function
5535    so that we can print its name in some movdi_er_tlsldm pattern.  */
5536 
5537 static const char *
5538 get_some_local_dynamic_name ()
5539 {
5540   rtx insn;
5541 
5542   if (cfun->machine->some_ld_name)
5543     return cfun->machine->some_ld_name;
5544 
5545   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
5546     if (INSN_P (insn)
5547 	&& for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
5548       return cfun->machine->some_ld_name;
5549 
5550   abort ();
5551 }
5552 
5553 static int
5554 get_some_local_dynamic_name_1 (px, data)
5555      rtx *px;
5556      void *data ATTRIBUTE_UNUSED;
5557 {
5558   rtx x = *px;
5559 
5560   if (GET_CODE (x) == SYMBOL_REF)
5561     {
5562       const char *str = XSTR (x, 0);
5563       if (str[0] == '@' && str[1] == 'D')
5564 	{
5565           cfun->machine->some_ld_name = str;
5566           return 1;
5567 	}
5568     }
5569 
5570   return 0;
5571 }
5572 
5573 /* Print an operand.  Recognize special options, documented below.  */
5574 
5575 void
5576 print_operand (file, x, code)
5577     FILE *file;
5578     rtx x;
5579     int code;
5580 {
5581   int i;
5582 
5583   switch (code)
5584     {
5585     case '~':
5586       /* Print the assembler name of the current function.  */
5587       assemble_name (file, alpha_fnname);
5588       break;
5589 
5590     case '&':
5591       assemble_name (file, get_some_local_dynamic_name ());
5592       break;
5593 
5594     case '/':
5595       {
5596 	const char *trap = get_trap_mode_suffix ();
5597 	const char *round = get_round_mode_suffix ();
5598 
5599 	if (trap || round)
5600 	  fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"),
5601 		   (trap ? trap : ""), (round ? round : ""));
5602 	break;
5603       }
5604 
5605     case ',':
5606       /* Generates single precision instruction suffix.  */
5607       fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
5608       break;
5609 
5610     case '-':
5611       /* Generates double precision instruction suffix.  */
5612       fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
5613       break;
5614 
5615     case '#':
5616       if (alpha_this_literal_sequence_number == 0)
5617 	alpha_this_literal_sequence_number = alpha_next_sequence_number++;
5618       fprintf (file, "%d", alpha_this_literal_sequence_number);
5619       break;
5620 
5621     case '*':
5622       if (alpha_this_gpdisp_sequence_number == 0)
5623 	alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
5624       fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
5625       break;
5626 
5627     case 'H':
5628       if (GET_CODE (x) == HIGH)
5629 	output_addr_const (file, XEXP (x, 0));
5630       else
5631 	output_operand_lossage ("invalid %%H value");
5632       break;
5633 
5634     case 'J':
5635       {
5636 	const char *lituse;
5637 
5638         if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD_CALL)
5639 	  {
5640 	    x = XVECEXP (x, 0, 0);
5641 	    lituse = "lituse_tlsgd";
5642 	  }
5643 	else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM_CALL)
5644 	  {
5645 	    x = XVECEXP (x, 0, 0);
5646 	    lituse = "lituse_tlsldm";
5647 	  }
5648 	else if (GET_CODE (x) == CONST_INT)
5649 	  lituse = "lituse_jsr";
5650 	else
5651 	  {
5652 	    output_operand_lossage ("invalid %%J value");
5653 	    break;
5654 	  }
5655 
5656 	if (x != const0_rtx)
5657 	  fprintf (file, "\t\t!%s!%d", lituse, (int) INTVAL (x));
5658       }
5659       break;
5660 
5661     case 'r':
5662       /* If this operand is the constant zero, write it as "$31".  */
5663       if (GET_CODE (x) == REG)
5664 	fprintf (file, "%s", reg_names[REGNO (x)]);
5665       else if (x == CONST0_RTX (GET_MODE (x)))
5666 	fprintf (file, "$31");
5667       else
5668 	output_operand_lossage ("invalid %%r value");
5669       break;
5670 
5671     case 'R':
5672       /* Similar, but for floating-point.  */
5673       if (GET_CODE (x) == REG)
5674 	fprintf (file, "%s", reg_names[REGNO (x)]);
5675       else if (x == CONST0_RTX (GET_MODE (x)))
5676 	fprintf (file, "$f31");
5677       else
5678 	output_operand_lossage ("invalid %%R value");
5679       break;
5680 
5681     case 'N':
5682       /* Write the 1's complement of a constant.  */
5683       if (GET_CODE (x) != CONST_INT)
5684 	output_operand_lossage ("invalid %%N value");
5685 
5686       fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
5687       break;
5688 
5689     case 'P':
5690       /* Write 1 << C, for a constant C.  */
5691       if (GET_CODE (x) != CONST_INT)
5692 	output_operand_lossage ("invalid %%P value");
5693 
5694       fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x));
5695       break;
5696 
5697     case 'h':
5698       /* Write the high-order 16 bits of a constant, sign-extended.  */
5699       if (GET_CODE (x) != CONST_INT)
5700 	output_operand_lossage ("invalid %%h value");
5701 
5702       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) >> 16);
5703       break;
5704 
5705     case 'L':
5706       /* Write the low-order 16 bits of a constant, sign-extended.  */
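      /* Below, (v & 0xffff) - 2 * (v & 0x8000) subtracts 0x10000
	 exactly when bit 15 is set; e.g. 0x8000 prints as -32768.  */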
5707       if (GET_CODE (x) != CONST_INT)
5708 	output_operand_lossage ("invalid %%L value");
5709 
5710       fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5711 	       (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
5712       break;
5713 
5714     case 'm':
5715       /* Write mask for ZAP insn.  */
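      /* Each nonzero byte of the constant sets the corresponding bit
	 of the 8-bit ZAP mask; e.g. 0x00ff00ff yields mask 0x05.  */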
5716       if (GET_CODE (x) == CONST_DOUBLE)
5717 	{
5718 	  HOST_WIDE_INT mask = 0;
5719 	  HOST_WIDE_INT value;
5720 
5721 	  value = CONST_DOUBLE_LOW (x);
5722 	  for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5723 	       i++, value >>= 8)
5724 	    if (value & 0xff)
5725 	      mask |= (1 << i);
5726 
5727 	  value = CONST_DOUBLE_HIGH (x);
5728 	  for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5729 	       i++, value >>= 8)
5730 	    if (value & 0xff)
5731 	      mask |= (1 << (i + sizeof (int)));
5732 
5733 	  fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
5734 	}
5735 
5736       else if (GET_CODE (x) == CONST_INT)
5737 	{
5738 	  HOST_WIDE_INT mask = 0, value = INTVAL (x);
5739 
5740 	  for (i = 0; i < 8; i++, value >>= 8)
5741 	    if (value & 0xff)
5742 	      mask |= (1 << i);
5743 
5744 	  fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask);
5745 	}
5746       else
5747 	output_operand_lossage ("invalid %%m value");
5748       break;
5749 
5750     case 'M':
5751       /* 'b', 'w', 'l', or 'q' as the value of the constant.  */
5752       if (GET_CODE (x) != CONST_INT
5753 	  || (INTVAL (x) != 8 && INTVAL (x) != 16
5754 	      && INTVAL (x) != 32 && INTVAL (x) != 64))
5755 	output_operand_lossage ("invalid %%M value");
5756 
5757       fprintf (file, "%s",
5758 	       (INTVAL (x) == 8 ? "b"
5759 		: INTVAL (x) == 16 ? "w"
5760 		: INTVAL (x) == 32 ? "l"
5761 		: "q"));
5762       break;
5763 
5764     case 'U':
5765       /* Similar, except do it from the mask.  */
5766       if (GET_CODE (x) == CONST_INT)
5767 	{
5768 	  HOST_WIDE_INT value = INTVAL (x);
5769 
5770 	  if (value == 0xff)
5771 	    {
5772 	      fputc ('b', file);
5773 	      break;
5774 	    }
5775 	  if (value == 0xffff)
5776 	    {
5777 	      fputc ('w', file);
5778 	      break;
5779 	    }
5780 	  if (value == 0xffffffff)
5781 	    {
5782 	      fputc ('l', file);
5783 	      break;
5784 	    }
5785 	  if (value == -1)
5786 	    {
5787 	      fputc ('q', file);
5788 	      break;
5789 	    }
5790 	}
5791       else if (HOST_BITS_PER_WIDE_INT == 32
5792 	       && GET_CODE (x) == CONST_DOUBLE
5793 	       && CONST_DOUBLE_LOW (x) == 0xffffffff
5794 	       && CONST_DOUBLE_HIGH (x) == 0)
5795 	{
5796 	  fputc ('l', file);
5797 	  break;
5798 	}
5799       output_operand_lossage ("invalid %%U value");
5800       break;
5801 
5802     case 's':
5803       /* Write the constant value divided by 8 for little-endian mode or
5804 	 (56 - value) / 8 for big-endian mode.  */
5805 
5806       if (GET_CODE (x) != CONST_INT
5807 	  || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
5808 						     ? 56
5809 						     : 64)
5810 	  || (INTVAL (x) & 7) != 0)
5811 	output_operand_lossage ("invalid %%s value");
5812 
5813       fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5814 	       WORDS_BIG_ENDIAN
5815 	       ? (56 - INTVAL (x)) / 8
5816 	       : INTVAL (x) / 8);
5817       break;
5818 
5819     case 'S':
5820       /* Same, except compute (64 - c) / 8 */
5821 
5822       if (GET_CODE (x) != CONST_INT
5823 	  || (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
5824 	  || (INTVAL (x) & 7) != 0)
5825 	output_operand_lossage ("invalid %%S value");
5826 
5827       fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
5828       break;
5829 
5830     case 't':
5831       {
5832         /* On Unicos/Mk systems: use a DEX expression if the symbol
5833 	   clashes with a register name.  */
5834 	int dex = unicosmk_need_dex (x);
5835 	if (dex)
5836 	  fprintf (file, "DEX(%d)", dex);
5837 	else
5838 	  output_addr_const (file, x);
5839       }
5840       break;
5841 
5842     case 'C': case 'D': case 'c': case 'd':
5843       /* Write out comparison name.  */
5844       {
5845 	enum rtx_code c = GET_CODE (x);
5846 
5847         if (GET_RTX_CLASS (c) != '<')
5848 	  output_operand_lossage ("invalid %%C value");
5849 
5850 	else if (code == 'D')
5851 	  c = reverse_condition (c);
5852 	else if (code == 'c')
5853 	  c = swap_condition (c);
5854 	else if (code == 'd')
5855 	  c = swap_condition (reverse_condition (c));
5856 
5857         if (c == LEU)
5858 	  fprintf (file, "ule");
5859         else if (c == LTU)
5860 	  fprintf (file, "ult");
5861 	else if (c == UNORDERED)
5862 	  fprintf (file, "un");
5863         else
5864 	  fprintf (file, "%s", GET_RTX_NAME (c));
5865       }
5866       break;
5867 
5868     case 'E':
5869       /* Write the divide or modulus operator.  */
5870       switch (GET_CODE (x))
5871 	{
5872 	case DIV:
5873 	  fprintf (file, "div%s", GET_MODE (x) == SImode ? "l" : "q");
5874 	  break;
5875 	case UDIV:
5876 	  fprintf (file, "div%su", GET_MODE (x) == SImode ? "l" : "q");
5877 	  break;
5878 	case MOD:
5879 	  fprintf (file, "rem%s", GET_MODE (x) == SImode ? "l" : "q");
5880 	  break;
5881 	case UMOD:
5882 	  fprintf (file, "rem%su", GET_MODE (x) == SImode ? "l" : "q");
5883 	  break;
5884 	default:
5885 	  output_operand_lossage ("invalid %%E value");
5886 	  break;
5887 	}
5888       break;
5889 
5890     case 'A':
5891       /* Write "_u" for unaligned access.  */
5892       if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
5893 	fprintf (file, "_u");
5894       break;
5895 
5896     case 0:
5897       if (GET_CODE (x) == REG)
5898 	fprintf (file, "%s", reg_names[REGNO (x)]);
5899       else if (GET_CODE (x) == MEM)
5900 	output_address (XEXP (x, 0));
5901       else if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
5902 	{
5903 	  switch (XINT (XEXP (x, 0), 1))
5904 	    {
5905 	    case UNSPEC_DTPREL:
5906 	    case UNSPEC_TPREL:
5907 	      output_addr_const (file, XVECEXP (XEXP (x, 0), 0, 0));
5908 	      break;
5909 	    default:
5910 	      output_operand_lossage ("unknown relocation unspec");
5911 	      break;
5912 	    }
5913 	}
5914       else
5915 	output_addr_const (file, x);
5916       break;
5917 
5918     default:
5919       output_operand_lossage ("invalid %%xn code");
5920     }
5921 }
5922 
5923 void
5924 print_operand_address (file, addr)
5925     FILE *file;
5926      rtx addr;
5927 {
5928   int basereg = 31;
5929   HOST_WIDE_INT offset = 0;
5930 
5931   if (GET_CODE (addr) == AND)
5932     addr = XEXP (addr, 0);
5933 
5934   if (GET_CODE (addr) == PLUS
5935       && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5936     {
5937       offset = INTVAL (XEXP (addr, 1));
5938       addr = XEXP (addr, 0);
5939     }
5940 
5941   if (GET_CODE (addr) == LO_SUM)
5942     {
5943       const char *reloc16, *reloclo;
5944       rtx op1 = XEXP (addr, 1);
5945 
5946       if (GET_CODE (op1) == CONST && GET_CODE (XEXP (op1, 0)) == UNSPEC)
5947 	{
5948 	  op1 = XEXP (op1, 0);
5949 	  switch (XINT (op1, 1))
5950 	    {
5951 	    case UNSPEC_DTPREL:
5952 	      reloc16 = NULL;
5953 	      reloclo = (alpha_tls_size == 16 ? "dtprel" : "dtprello");
5954 	      break;
5955 	    case UNSPEC_TPREL:
5956 	      reloc16 = NULL;
5957 	      reloclo = (alpha_tls_size == 16 ? "tprel" : "tprello");
5958 	      break;
5959 	    default:
5960 	      output_operand_lossage ("unknown relocation unspec");
5961 	      return;
5962 	    }
5963 
5964 	  output_addr_const (file, XVECEXP (op1, 0, 0));
5965 	}
5966       else
5967 	{
5968 	  reloc16 = "gprel";
5969 	  reloclo = "gprellow";
5970 	  output_addr_const (file, op1);
5971 	}
5972 
5973       if (offset)
5974 	{
5975 	  fputc ('+', file);
5976 	  fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
5977 	}
5978 
5979       addr = XEXP (addr, 0);
5980       if (GET_CODE (addr) == REG)
5981 	basereg = REGNO (addr);
5982       else if (GET_CODE (addr) == SUBREG
5983 	       && GET_CODE (SUBREG_REG (addr)) == REG)
5984 	basereg = subreg_regno (addr);
5985       else
5986 	abort ();
5987 
5988       fprintf (file, "($%d)\t\t!%s", basereg,
5989 	       (basereg == 29 ? reloc16 : reloclo));
5990       return;
5991     }
5992 
5993   if (GET_CODE (addr) == REG)
5994     basereg = REGNO (addr);
5995   else if (GET_CODE (addr) == SUBREG
5996 	   && GET_CODE (SUBREG_REG (addr)) == REG)
5997     basereg = subreg_regno (addr);
5998   else if (GET_CODE (addr) == CONST_INT)
5999     offset = INTVAL (addr);
6000 
6001 #if TARGET_ABI_OPEN_VMS
6002   else if (GET_CODE (addr) == SYMBOL_REF)
6003     {
6004       fprintf (file, "%s", XSTR (addr, 0));
6005       return;
6006     }
6007   else if (GET_CODE (addr) == CONST
6008 	   && GET_CODE (XEXP (addr, 0)) == PLUS
6009 	   && GET_CODE (XEXP (XEXP (addr, 0), 0)) == SYMBOL_REF)
6010     {
6011       fprintf (file, "%s+" HOST_WIDE_INT_PRINT_DEC,
6012 	       XSTR (XEXP (XEXP (addr, 0), 0), 0),
6013 	       INTVAL (XEXP (XEXP (addr, 0), 1)));
6014       return;
6015     }
6016 #endif
6017 
6018   else
6019     abort ();
6020 
6021   fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
6022   fprintf (file, "($%d)", basereg);
6023 }
6024 
6025 /* Emit RTL insns to initialize the variable parts of a trampoline at
6026    TRAMP. FNADDR is an RTX for the address of the function's pure
6027    code.  CXT is an RTX for the static chain value for the function.
6028 
6029    The three offset parameters are for the individual template's
6030    layout.  A JMPOFS < 0 indicates that the trampoline does not
6031    contain instructions at all.
6032 
6033    We assume here that a function will be called many more times than
6034    its address is taken (e.g., it might be passed to qsort), so we
6035    take the trouble to initialize the "hint" field in the JMP insn.
6036    Note that the hint field is PC (new) + 4 * bits 13:0.  */
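/* Concretely, the hint computed by the (currently disabled) code below
   is ((FNADDR - (TRAMP + JMPOFS + 4)) >> 2) & 0x3fff.  */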
6037 
6038 void
6039 alpha_initialize_trampoline (tramp, fnaddr, cxt, fnofs, cxtofs, jmpofs)
6040      rtx tramp, fnaddr, cxt;
6041      int fnofs, cxtofs, jmpofs;
6042 {
6043   rtx temp, temp1, addr;
6044   /* VMS really uses DImode pointers in memory at this point.  */
6045   enum machine_mode mode = TARGET_ABI_OPEN_VMS ? Pmode : ptr_mode;
6046 
6047 #ifdef POINTERS_EXTEND_UNSIGNED
6048   fnaddr = convert_memory_address (mode, fnaddr);
6049   cxt = convert_memory_address (mode, cxt);
6050 #endif
6051 
6052   /* Store function address and CXT.  */
6053   addr = memory_address (mode, plus_constant (tramp, fnofs));
6054   emit_move_insn (gen_rtx_MEM (mode, addr), fnaddr);
6055   addr = memory_address (mode, plus_constant (tramp, cxtofs));
6056   emit_move_insn (gen_rtx_MEM (mode, addr), cxt);
6057 
6058   /* This has been disabled since the hint only has a 32k range, and in
6059      no existing OS is the stack within 32k of the text segment.  */
6060   if (0 && jmpofs >= 0)
6061     {
6062       /* Compute hint value.  */
6063       temp = force_operand (plus_constant (tramp, jmpofs+4), NULL_RTX);
6064       temp = expand_binop (DImode, sub_optab, fnaddr, temp, temp, 1,
6065 			   OPTAB_WIDEN);
6066       temp = expand_shift (RSHIFT_EXPR, Pmode, temp,
6067 		           build_int_2 (2, 0), NULL_RTX, 1);
6068       temp = expand_and (SImode, gen_lowpart (SImode, temp),
6069 			 GEN_INT (0x3fff), 0);
6070 
6071       /* Merge in the hint.  */
6072       addr = memory_address (SImode, plus_constant (tramp, jmpofs));
6073       temp1 = force_reg (SImode, gen_rtx_MEM (SImode, addr));
6074       temp1 = expand_and (SImode, temp1, GEN_INT (0xffffc000), NULL_RTX);
6075       temp1 = expand_binop (SImode, ior_optab, temp1, temp, temp1, 1,
6076 			    OPTAB_WIDEN);
6077       emit_move_insn (gen_rtx_MEM (SImode, addr), temp1);
6078     }
6079 
6080 #ifdef TRANSFER_FROM_TRAMPOLINE
6081   emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__enable_execute_stack"),
6082 		     0, VOIDmode, 1, tramp, Pmode);
6083 #endif
6084 
6085   if (jmpofs >= 0)
6086     emit_insn (gen_imb ());
6087 }
6088 
6089 /* Determine where to put an argument to a function.
6090    Value is zero to push the argument on the stack,
6091    or a hard register in which to store the argument.
6092 
6093    MODE is the argument's machine mode.
6094    TYPE is the data type of the argument (as a tree).
6095     This is null for libcalls where that information may
6096     not be available.
6097    CUM is a variable of type CUMULATIVE_ARGS which gives info about
6098     the preceding args and about the function being called.
6099    NAMED is nonzero if this argument is a named parameter
6100     (otherwise it is an extra parameter matching an ellipsis).
6101 
6102    On Alpha the first 6 words of args are normally in registers
6103    and the rest are pushed.  */
6104 
6105 rtx
6106 function_arg (cum, mode, type, named)
6107      CUMULATIVE_ARGS cum;
6108      enum machine_mode mode;
6109      tree type;
6110      int named ATTRIBUTE_UNUSED;
6111 {
6112   int basereg;
6113   int num_args;
6114 
6115   /* Set up defaults for FP operands passed in FP registers, and
6116      integral operands passed in integer registers.  */
6117   if (TARGET_FPREGS
6118       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
6119 	  || GET_MODE_CLASS (mode) == MODE_FLOAT))
6120     basereg = 32 + 16;
6121   else
6122     basereg = 16;
6123 
6124   /* ??? Irritatingly, the definition of CUMULATIVE_ARGS is different for
6125      the three platforms, so we can't avoid conditional compilation.  */
6126 #if TARGET_ABI_OPEN_VMS
6127     {
6128       if (mode == VOIDmode)
6129 	return alpha_arg_info_reg_val (cum);
6130 
6131       num_args = cum.num_args;
6132       if (num_args >= 6 || MUST_PASS_IN_STACK (mode, type))
6133 	return NULL_RTX;
6134     }
6135 #else
6136 #if TARGET_ABI_UNICOSMK
6137     {
6138       int size;
6139 
6140       /* If this is the last argument, generate the call info word (CIW).  */
6141       /* ??? We don't include the caller's line number in the CIW because
6142 	 I don't know how to determine it if debug info is turned off.  */
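      /* As assembled below: argument-type flags live in bits 7..3,
	 the register word count in bits 2..0, the total argument word
	 count at bit 32, and the argument count at bit 52.  */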
6143       if (mode == VOIDmode)
6144 	{
6145 	  int i;
6146 	  HOST_WIDE_INT lo;
6147 	  HOST_WIDE_INT hi;
6148 	  rtx ciw;
6149 
6150 	  lo = 0;
6151 
6152 	  for (i = 0; i < cum.num_reg_words && i < 5; i++)
6153 	    if (cum.reg_args_type[i])
6154 	      lo |= (1 << (7 - i));
6155 
6156 	  if (cum.num_reg_words == 6 && cum.reg_args_type[5])
6157 	    lo |= 7;
6158 	  else
6159 	    lo |= cum.num_reg_words;
6160 
6161 #if HOST_BITS_PER_WIDE_INT == 32
6162 	  hi = (cum.num_args << 20) | cum.num_arg_words;
6163 #else
6164 	  lo = lo | ((HOST_WIDE_INT) cum.num_args << 52)
6165 	    | ((HOST_WIDE_INT) cum.num_arg_words << 32);
6166 	  hi = 0;
6167 #endif
6168 	  ciw = immed_double_const (lo, hi, DImode);
6169 
6170 	  return gen_rtx_UNSPEC (DImode, gen_rtvec (1, ciw),
6171 				 UNSPEC_UMK_LOAD_CIW);
6172 	}
6173 
6174       size = ALPHA_ARG_SIZE (mode, type, named);
6175       num_args = cum.num_reg_words;
6176       if (MUST_PASS_IN_STACK (mode, type)
6177 	  || cum.num_reg_words + size > 6 || cum.force_stack)
6178 	return NULL_RTX;
6179       else if (type && TYPE_MODE (type) == BLKmode)
6180 	{
6181 	  rtx reg1, reg2;
6182 
6183 	  reg1 = gen_rtx_REG (DImode, num_args + 16);
6184 	  reg1 = gen_rtx_EXPR_LIST (DImode, reg1, const0_rtx);
6185 
6186 	  /* The argument fits in two registers. Note that we still need to
6187 	     reserve a register for empty structures.  */
6188 	  if (size == 0)
6189 	    return NULL_RTX;
6190 	  else if (size == 1)
6191 	    return gen_rtx_PARALLEL (mode, gen_rtvec (1, reg1));
6192 	  else
6193 	    {
6194 	      reg2 = gen_rtx_REG (DImode, num_args + 17);
6195 	      reg2 = gen_rtx_EXPR_LIST (DImode, reg2, GEN_INT (8));
6196 	      return gen_rtx_PARALLEL (mode, gen_rtvec (2, reg1, reg2));
6197 	    }
6198 	}
6199     }
6200 #else
6201     {
6202       if (cum >= 6)
6203 	return NULL_RTX;
6204       num_args = cum;
6205 
6206       /* VOID is passed as a special flag for "last argument".  */
6207       if (type == void_type_node)
6208 	basereg = 16;
6209       else if (MUST_PASS_IN_STACK (mode, type))
6210 	return NULL_RTX;
6211       else if (FUNCTION_ARG_PASS_BY_REFERENCE (cum, mode, type, named))
6212 	basereg = 16;
6213     }
6214 #endif /* TARGET_ABI_UNICOSMK */
6215 #endif /* TARGET_ABI_OPEN_VMS */
6216 
6217   return gen_rtx_REG (mode, num_args + basereg);
6218 }
6219 
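/* Build the OSF va_list type, roughly equivalent to
     struct __va_list_tag { void *__base; int __offset; };
   VMS and Unicos/Mk simply use a plain pointer.  */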
6220 tree
6221 alpha_build_va_list ()
6222 {
6223   tree base, ofs, record, type_decl;
6224 
6225   if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
6226     return ptr_type_node;
6227 
6228   record = (*lang_hooks.types.make_type) (RECORD_TYPE);
6229   type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
6230   TREE_CHAIN (record) = type_decl;
6231   TYPE_NAME (record) = type_decl;
6232 
6233   /* C++? SET_IS_AGGR_TYPE (record, 1); */
6234 
6235   ofs = build_decl (FIELD_DECL, get_identifier ("__offset"),
6236 		    integer_type_node);
6237   DECL_FIELD_CONTEXT (ofs) = record;
6238 
6239   base = build_decl (FIELD_DECL, get_identifier ("__base"),
6240 		     ptr_type_node);
6241   DECL_FIELD_CONTEXT (base) = record;
6242   TREE_CHAIN (base) = ofs;
6243 
6244   TYPE_FIELDS (record) = base;
6245   layout_type (record);
6246 
6247   return record;
6248 }
6249 
6250 void
6251 alpha_va_start (valist, nextarg)
6252      tree valist;
6253      rtx nextarg ATTRIBUTE_UNUSED;
6254 {
6255   HOST_WIDE_INT offset;
6256   tree t, offset_field, base_field;
6257 
6258   if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK)
6259     return;
6260 
6261   if (TARGET_ABI_UNICOSMK)
6262     std_expand_builtin_va_start (valist, nextarg);
6263 
6264   /* For Unix, SETUP_INCOMING_VARARGS moves the starting address base
6265      up by 48, storing fp arg registers in the first 48 bytes, and the
6266      integer arg registers in the next 48 bytes.  This is only done,
6267      however, if any integer registers need to be stored.
6268 
6269      If no integer registers need be stored, then we must subtract 48
6270      in order to account for the integer arg registers which are counted
6271      in argsize above, but which are not actually stored on the stack.
6272      Must further be careful here about structures straddling the last
6273      integer argument register; that futzes with pretend_args_size,
6274      which changes the meaning of AP.  */
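  /* The invariant relied on by alpha_va_arg is that __base + __offset
     addresses the next integer argument slot, and __base + __offset
     minus 6*8 the corresponding FP slot, while __offset is still
     inside the register save area.  */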
6275 
6276   if (NUM_ARGS <= 6)
6277     offset = TARGET_ABI_OPEN_VMS ? UNITS_PER_WORD : 6 * UNITS_PER_WORD;
6278   else
6279     offset = -6 * UNITS_PER_WORD + current_function_pretend_args_size;
6280 
6281   if (TARGET_ABI_OPEN_VMS)
6282     {
6283       nextarg = plus_constant (nextarg, offset);
6284       nextarg = plus_constant (nextarg, NUM_ARGS * UNITS_PER_WORD);
6285       t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
6286 		 make_tree (ptr_type_node, nextarg));
6287       TREE_SIDE_EFFECTS (t) = 1;
6288 
6289       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6290     }
6291   else
6292     {
6293       base_field = TYPE_FIELDS (TREE_TYPE (valist));
6294       offset_field = TREE_CHAIN (base_field);
6295 
6296       base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
6297 			  valist, base_field);
6298       offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
6299 			    valist, offset_field);
6300 
6301       t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
6302       t = build (PLUS_EXPR, ptr_type_node, t, build_int_2 (offset, 0));
6303       t = build (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
6304       TREE_SIDE_EFFECTS (t) = 1;
6305       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6306 
6307       t = build_int_2 (NUM_ARGS * UNITS_PER_WORD, 0);
6308       t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field, t);
6309       TREE_SIDE_EFFECTS (t) = 1;
6310       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6311     }
6312 }
6313 
6314 rtx
6315 alpha_va_arg (valist, type)
6316      tree valist, type;
6317 {
6318   rtx addr;
6319   tree t, type_size, rounded_size;
6320   tree offset_field, base_field, addr_tree, addend;
6321   tree wide_type, wide_ofs;
6322   int indirect = 0;
6323 
6324   if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
6325     return std_expand_builtin_va_arg (valist, type);
6326 
6327   if (type == error_mark_node
6328       || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
6329       || TREE_OVERFLOW (type_size))
6330     rounded_size = size_zero_node;
6331   else
6332     rounded_size = fold (build (MULT_EXPR, sizetype,
6333 				fold (build (TRUNC_DIV_EXPR, sizetype,
6334 					     fold (build (PLUS_EXPR, sizetype,
6335 							  type_size,
6336 							  size_int (7))),
6337 					     size_int (8))),
6338 				size_int (8)));
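  /* I.e. rounded_size = ((type_size + 7) / 8) * 8: the argument size
     rounded up to whole 8-byte words.  */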
6339 
6340   base_field = TYPE_FIELDS (TREE_TYPE (valist));
6341   offset_field = TREE_CHAIN (base_field);
6342 
6343   base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
6344 		      valist, base_field);
6345   offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
6346 			valist, offset_field);
6347 
6348   /* If the type could not be passed in registers, skip the block
6349      reserved for the registers.  */
6350   if (MUST_PASS_IN_STACK (TYPE_MODE (type), type))
6351     {
6352       t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
6353 		 build (MAX_EXPR, TREE_TYPE (offset_field),
6354 			offset_field, build_int_2 (6*8, 0)));
6355       TREE_SIDE_EFFECTS (t) = 1;
6356       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6357     }
6358 
6359   wide_type = make_signed_type (64);
6360   wide_ofs = save_expr (build1 (CONVERT_EXPR, wide_type, offset_field));
6361 
6362   addend = wide_ofs;
6363 
6364   if (TYPE_MODE (type) == TFmode || TYPE_MODE (type) == TCmode)
6365     {
6366       indirect = 1;
6367       rounded_size = size_int (UNITS_PER_WORD);
6368     }
6369   else if (FLOAT_TYPE_P (type))
6370     {
6371       tree fpaddend, cond;
6372 
6373       fpaddend = fold (build (PLUS_EXPR, TREE_TYPE (addend),
6374 			      addend, build_int_2 (-6*8, 0)));
6375 
6376       cond = fold (build (LT_EXPR, integer_type_node,
6377 			  wide_ofs, build_int_2 (6*8, 0)));
6378 
6379       addend = fold (build (COND_EXPR, TREE_TYPE (addend), cond,
6380 			    fpaddend, addend));
6381     }
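  /* ADDEND now selects the FP save area (6*8 bytes lower) for FP types
     whose offset is still within the six register slots, and the
     integer save area otherwise.  */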
6382 
6383   addr_tree = build (PLUS_EXPR, TREE_TYPE (base_field),
6384 		     base_field, addend);
6385 
6386   addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
6387   addr = copy_to_reg (addr);
6388 
6389   t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
6390 	     build (PLUS_EXPR, TREE_TYPE (offset_field),
6391 		    offset_field, rounded_size));
6392   TREE_SIDE_EFFECTS (t) = 1;
6393   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6394 
6395   if (indirect)
6396     {
6397       addr = force_reg (Pmode, addr);
6398       addr = gen_rtx_MEM (Pmode, addr);
6399     }
6400 
6401   return addr;
6402 }
6403 
6404 /* Builtins.  */
6405 
6406 enum alpha_builtin
6407 {
6408   ALPHA_BUILTIN_CMPBGE,
6409   ALPHA_BUILTIN_EXTBL,
6410   ALPHA_BUILTIN_EXTWL,
6411   ALPHA_BUILTIN_EXTLL,
6412   ALPHA_BUILTIN_EXTQL,
6413   ALPHA_BUILTIN_EXTWH,
6414   ALPHA_BUILTIN_EXTLH,
6415   ALPHA_BUILTIN_EXTQH,
6416   ALPHA_BUILTIN_INSBL,
6417   ALPHA_BUILTIN_INSWL,
6418   ALPHA_BUILTIN_INSLL,
6419   ALPHA_BUILTIN_INSQL,
6420   ALPHA_BUILTIN_INSWH,
6421   ALPHA_BUILTIN_INSLH,
6422   ALPHA_BUILTIN_INSQH,
6423   ALPHA_BUILTIN_MSKBL,
6424   ALPHA_BUILTIN_MSKWL,
6425   ALPHA_BUILTIN_MSKLL,
6426   ALPHA_BUILTIN_MSKQL,
6427   ALPHA_BUILTIN_MSKWH,
6428   ALPHA_BUILTIN_MSKLH,
6429   ALPHA_BUILTIN_MSKQH,
6430   ALPHA_BUILTIN_UMULH,
6431   ALPHA_BUILTIN_ZAP,
6432   ALPHA_BUILTIN_ZAPNOT,
6433   ALPHA_BUILTIN_AMASK,
6434   ALPHA_BUILTIN_IMPLVER,
6435   ALPHA_BUILTIN_RPCC,
6436   ALPHA_BUILTIN_THREAD_POINTER,
6437   ALPHA_BUILTIN_SET_THREAD_POINTER,
6438 
6439   /* TARGET_MAX */
6440   ALPHA_BUILTIN_MINUB8,
6441   ALPHA_BUILTIN_MINSB8,
6442   ALPHA_BUILTIN_MINUW4,
6443   ALPHA_BUILTIN_MINSW4,
6444   ALPHA_BUILTIN_MAXUB8,
6445   ALPHA_BUILTIN_MAXSB8,
6446   ALPHA_BUILTIN_MAXUW4,
6447   ALPHA_BUILTIN_MAXSW4,
6448   ALPHA_BUILTIN_PERR,
6449   ALPHA_BUILTIN_PKLB,
6450   ALPHA_BUILTIN_PKWB,
6451   ALPHA_BUILTIN_UNPKBL,
6452   ALPHA_BUILTIN_UNPKBW,
6453 
6454   /* TARGET_CIX */
6455   ALPHA_BUILTIN_CTTZ,
6456   ALPHA_BUILTIN_CTLZ,
6457   ALPHA_BUILTIN_CTPOP,
6458 
6459   ALPHA_BUILTIN_max
6460 };
6461 
6462 static unsigned int const code_for_builtin[ALPHA_BUILTIN_max] = {
6463   CODE_FOR_builtin_cmpbge,
6464   CODE_FOR_builtin_extbl,
6465   CODE_FOR_builtin_extwl,
6466   CODE_FOR_builtin_extll,
6467   CODE_FOR_builtin_extql,
6468   CODE_FOR_builtin_extwh,
6469   CODE_FOR_builtin_extlh,
6470   CODE_FOR_builtin_extqh,
6471   CODE_FOR_builtin_insbl,
6472   CODE_FOR_builtin_inswl,
6473   CODE_FOR_builtin_insll,
6474   CODE_FOR_builtin_insql,
6475   CODE_FOR_builtin_inswh,
6476   CODE_FOR_builtin_inslh,
6477   CODE_FOR_builtin_insqh,
6478   CODE_FOR_builtin_mskbl,
6479   CODE_FOR_builtin_mskwl,
6480   CODE_FOR_builtin_mskll,
6481   CODE_FOR_builtin_mskql,
6482   CODE_FOR_builtin_mskwh,
6483   CODE_FOR_builtin_msklh,
6484   CODE_FOR_builtin_mskqh,
6485   CODE_FOR_umuldi3_highpart,
6486   CODE_FOR_builtin_zap,
6487   CODE_FOR_builtin_zapnot,
6488   CODE_FOR_builtin_amask,
6489   CODE_FOR_builtin_implver,
6490   CODE_FOR_builtin_rpcc,
6491   CODE_FOR_load_tp,
6492   CODE_FOR_set_tp,
6493 
6494   /* TARGET_MAX */
6495   CODE_FOR_builtin_minub8,
6496   CODE_FOR_builtin_minsb8,
6497   CODE_FOR_builtin_minuw4,
6498   CODE_FOR_builtin_minsw4,
6499   CODE_FOR_builtin_maxub8,
6500   CODE_FOR_builtin_maxsb8,
6501   CODE_FOR_builtin_maxuw4,
6502   CODE_FOR_builtin_maxsw4,
6503   CODE_FOR_builtin_perr,
6504   CODE_FOR_builtin_pklb,
6505   CODE_FOR_builtin_pkwb,
6506   CODE_FOR_builtin_unpkbl,
6507   CODE_FOR_builtin_unpkbw,
6508 
6509   /* TARGET_CIX */
6510   CODE_FOR_builtin_cttz,
6511   CODE_FOR_builtin_ctlz,
6512   CODE_FOR_builtin_ctpop
6513 };
6514 
6515 struct alpha_builtin_def
6516 {
6517   const char *name;
6518   enum alpha_builtin code;
6519   unsigned int target_mask;
6520 };
6521 
6522 static struct alpha_builtin_def const zero_arg_builtins[] = {
6523   { "__builtin_alpha_implver",	ALPHA_BUILTIN_IMPLVER,	0 },
6524   { "__builtin_alpha_rpcc",	ALPHA_BUILTIN_RPCC,	0 }
6525 };
6526 
6527 static struct alpha_builtin_def const one_arg_builtins[] = {
6528   { "__builtin_alpha_amask",	ALPHA_BUILTIN_AMASK,	0 },
6529   { "__builtin_alpha_pklb",	ALPHA_BUILTIN_PKLB,	MASK_MAX },
6530   { "__builtin_alpha_pkwb",	ALPHA_BUILTIN_PKWB,	MASK_MAX },
6531   { "__builtin_alpha_unpkbl",	ALPHA_BUILTIN_UNPKBL,	MASK_MAX },
6532   { "__builtin_alpha_unpkbw",	ALPHA_BUILTIN_UNPKBW,	MASK_MAX },
6533   { "__builtin_alpha_cttz",	ALPHA_BUILTIN_CTTZ,	MASK_CIX },
6534   { "__builtin_alpha_ctlz",	ALPHA_BUILTIN_CTLZ,	MASK_CIX },
6535   { "__builtin_alpha_ctpop",	ALPHA_BUILTIN_CTPOP,	MASK_CIX }
6536 };
6537 
6538 static struct alpha_builtin_def const two_arg_builtins[] = {
6539   { "__builtin_alpha_cmpbge",	ALPHA_BUILTIN_CMPBGE,	0 },
6540   { "__builtin_alpha_extbl",	ALPHA_BUILTIN_EXTBL,	0 },
6541   { "__builtin_alpha_extwl",	ALPHA_BUILTIN_EXTWL,	0 },
6542   { "__builtin_alpha_extll",	ALPHA_BUILTIN_EXTLL,	0 },
6543   { "__builtin_alpha_extql",	ALPHA_BUILTIN_EXTQL,	0 },
6544   { "__builtin_alpha_extwh",	ALPHA_BUILTIN_EXTWH,	0 },
6545   { "__builtin_alpha_extlh",	ALPHA_BUILTIN_EXTLH,	0 },
6546   { "__builtin_alpha_extqh",	ALPHA_BUILTIN_EXTQH,	0 },
6547   { "__builtin_alpha_insbl",	ALPHA_BUILTIN_INSBL,	0 },
6548   { "__builtin_alpha_inswl",	ALPHA_BUILTIN_INSWL,	0 },
6549   { "__builtin_alpha_insll",	ALPHA_BUILTIN_INSLL,	0 },
6550   { "__builtin_alpha_insql",	ALPHA_BUILTIN_INSQL,	0 },
6551   { "__builtin_alpha_inswh",	ALPHA_BUILTIN_INSWH,	0 },
6552   { "__builtin_alpha_inslh",	ALPHA_BUILTIN_INSLH,	0 },
6553   { "__builtin_alpha_insqh",	ALPHA_BUILTIN_INSQH,	0 },
6554   { "__builtin_alpha_mskbl",	ALPHA_BUILTIN_MSKBL,	0 },
6555   { "__builtin_alpha_mskwl",	ALPHA_BUILTIN_MSKWL,	0 },
6556   { "__builtin_alpha_mskll",	ALPHA_BUILTIN_MSKLL,	0 },
6557   { "__builtin_alpha_mskql",	ALPHA_BUILTIN_MSKQL,	0 },
6558   { "__builtin_alpha_mskwh",	ALPHA_BUILTIN_MSKWH,	0 },
6559   { "__builtin_alpha_msklh",	ALPHA_BUILTIN_MSKLH,	0 },
6560   { "__builtin_alpha_mskqh",	ALPHA_BUILTIN_MSKQH,	0 },
6561   { "__builtin_alpha_umulh",	ALPHA_BUILTIN_UMULH,	0 },
6562   { "__builtin_alpha_zap",	ALPHA_BUILTIN_ZAP,	0 },
6563   { "__builtin_alpha_zapnot",	ALPHA_BUILTIN_ZAPNOT,	0 },
6564   { "__builtin_alpha_minub8",	ALPHA_BUILTIN_MINUB8,	MASK_MAX },
6565   { "__builtin_alpha_minsb8",	ALPHA_BUILTIN_MINSB8,	MASK_MAX },
6566   { "__builtin_alpha_minuw4",	ALPHA_BUILTIN_MINUW4,	MASK_MAX },
6567   { "__builtin_alpha_minsw4",	ALPHA_BUILTIN_MINSW4,	MASK_MAX },
6568   { "__builtin_alpha_maxub8",	ALPHA_BUILTIN_MAXUB8,	MASK_MAX },
6569   { "__builtin_alpha_maxsb8",	ALPHA_BUILTIN_MAXSB8,	MASK_MAX },
6570   { "__builtin_alpha_maxuw4",	ALPHA_BUILTIN_MAXUW4,	MASK_MAX },
6571   { "__builtin_alpha_maxsw4",	ALPHA_BUILTIN_MAXSW4,	MASK_MAX },
6572   { "__builtin_alpha_perr",	ALPHA_BUILTIN_PERR,	MASK_MAX }
6573 };
6574 
6575 static void
6576 alpha_init_builtins ()
6577 {
6578   const struct alpha_builtin_def *p;
6579   tree ftype;
6580   size_t i;
6581 
6582   ftype = build_function_type (long_integer_type_node, void_list_node);
6583 
6584   p = zero_arg_builtins;
6585   for (i = 0; i < ARRAY_SIZE (zero_arg_builtins); ++i, ++p)
6586     if ((target_flags & p->target_mask) == p->target_mask)
6587       builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6588 			NULL, NULL_TREE);
6589 
6590   ftype = build_function_type_list (long_integer_type_node,
6591 				    long_integer_type_node, NULL_TREE);
6592 
6593   p = one_arg_builtins;
6594   for (i = 0; i < ARRAY_SIZE (one_arg_builtins); ++i, ++p)
6595     if ((target_flags & p->target_mask) == p->target_mask)
6596       builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6597 			NULL, NULL_TREE);
6598 
6599   ftype = build_function_type_list (long_integer_type_node,
6600 				    long_integer_type_node,
6601 				    long_integer_type_node, NULL_TREE);
6602 
6603   p = two_arg_builtins;
6604   for (i = 0; i < ARRAY_SIZE (two_arg_builtins); ++i, ++p)
6605     if ((target_flags & p->target_mask) == p->target_mask)
6606       builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6607 			NULL, NULL_TREE);
6608 
6609   ftype = build_function_type (ptr_type_node, void_list_node);
6610   builtin_function ("__builtin_thread_pointer", ftype,
6611 		    ALPHA_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6612 		    NULL, NULL_TREE);
6613 
6614   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6615   builtin_function ("__builtin_set_thread_pointer", ftype,
6616 		    ALPHA_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6617 		    NULL, NULL_TREE);
6618 }
6619 
6620 /* Expand an expression EXP that calls a built-in function,
6621    with result going to TARGET if that's convenient
6622    (and in mode MODE if that's convenient).
6623    SUBTARGET may be used as the target for computing one of EXP's operands.
6624    IGNORE is nonzero if the value is to be ignored.  */
6625 
6626 static rtx
6627 alpha_expand_builtin (exp, target, subtarget, mode, ignore)
6628      tree exp;
6629      rtx target;
6630      rtx subtarget ATTRIBUTE_UNUSED;
6631      enum machine_mode mode ATTRIBUTE_UNUSED;
6632      int ignore ATTRIBUTE_UNUSED;
6633 {
6634 #define MAX_ARGS 2
6635 
6636   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6637   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6638   tree arglist = TREE_OPERAND (exp, 1);
6639   enum insn_code icode;
6640   rtx op[MAX_ARGS], pat;
6641   int arity;
6642   bool nonvoid;
6643 
6644   if (fcode >= ALPHA_BUILTIN_max)
6645     internal_error ("bad builtin fcode");
6646   icode = code_for_builtin[fcode];
6647   if (icode == 0)
6648     internal_error ("bad builtin fcode");
6649 
6650   nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6651 
6652   for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6653        arglist;
6654        arglist = TREE_CHAIN (arglist), arity++)
6655     {
6656       const struct insn_operand_data *insn_op;
6657 
6658       tree arg = TREE_VALUE (arglist);
6659       if (arg == error_mark_node)
6660 	return NULL_RTX;
6661       if (arity > MAX_ARGS)
6662 	return NULL_RTX;
6663 
6664       insn_op = &insn_data[icode].operand[arity + nonvoid];
6665 
6666       op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6667 
6668       if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6669 	op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6670     }
6671 
6672   if (nonvoid)
6673     {
6674       enum machine_mode tmode = insn_data[icode].operand[0].mode;
6675       if (!target
6676 	  || GET_MODE (target) != tmode
6677 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6678 	target = gen_reg_rtx (tmode);
6679     }
6680 
6681   switch (arity)
6682     {
6683     case 0:
6684       pat = GEN_FCN (icode) (target);
6685       break;
6686     case 1:
6687       if (nonvoid)
6688         pat = GEN_FCN (icode) (target, op[0]);
6689       else
6690 	pat = GEN_FCN (icode) (op[0]);
6691       break;
6692     case 2:
6693       pat = GEN_FCN (icode) (target, op[0], op[1]);
6694       break;
6695     default:
6696       abort ();
6697     }
6698   if (!pat)
6699     return NULL_RTX;
6700   emit_insn (pat);
6701 
6702   if (nonvoid)
6703     return target;
6704   else
6705     return const0_rtx;
6706 }
6707 
6708 /* This page contains routines that are used to determine what the function
6709    prologue and epilogue code will do and write them out.  */
6710 
6711 /* Compute the size of the save area in the stack.  */
6712 
6713 /* These variables are used for communication between the following functions.
6714    They indicate various things about the current function being compiled
6715    that are used to tell what kind of prologue, epilogue and procedure
6716    descriptor to generate.  */
6717 
6718 /* What kind of procedure (null, register, or stack) we are compiling.  */
6719 enum alpha_procedure_types {PT_NULL = 0, PT_REGISTER = 1, PT_STACK = 2};
6720 static enum alpha_procedure_types alpha_procedure_type;
6721 
6722 /* Register number (either FP or SP) that is used to unwind the frame.  */
6723 static int vms_unwind_regno;
6724 
6725 /* Register number used to save FP.  We need not have one for RA since
6726    we don't modify it for register procedures.  This is only defined
6727    for register frame procedures.  */
6728 static int vms_save_fp_regno;
6729 
6730 /* Register number used to reference objects off our PV.  */
6731 static int vms_base_regno;
6732 
6733 /* Compute register masks for saved registers.  */
6734 
6735 static void
6736 alpha_sa_mask (imaskP, fmaskP)
6737     unsigned long *imaskP;
6738     unsigned long *fmaskP;
6739 {
6740   unsigned long imask = 0;
6741   unsigned long fmask = 0;
6742   unsigned int i;
6743 
6744   /* Irritatingly, there are two kinds of thunks -- those created with
6745      TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
6746      through the regular part of the compiler.  In the
6747      TARGET_ASM_OUTPUT_MI_THUNK case we don't have valid register life
6748      info, but assemble_start_function wants to output .frame and
6749      .mask directives.  */
6750   if (current_function_is_thunk && !no_new_pseudos)
6751     {
6752       *imaskP = 0;
6753       *fmaskP = 0;
6754       return;
6755     }
6756 
6757   if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
6758     imask |= (1L << HARD_FRAME_POINTER_REGNUM);
6759 
6760   /* One for every register we have to save.  */
6761   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6762     if (! fixed_regs[i] && ! call_used_regs[i]
6763 	&& regs_ever_live[i] && i != REG_RA
6764 	&& (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
6765       {
6766 	if (i < 32)
6767 	  imask |= (1L << i);
6768 	else
6769 	  fmask |= (1L << (i - 32));
6770       }
6771 
6772   /* We need to restore these for the handler.  */
6773   if (current_function_calls_eh_return)
6774     {
6775       for (i = 0; ; ++i)
6776 	{
6777 	  unsigned regno = EH_RETURN_DATA_REGNO (i);
6778 	  if (regno == INVALID_REGNUM)
6779 	    break;
6780 	  imask |= 1L << regno;
6781 	}
6782     }
6783 
6784   /* If any register spilled, then spill the return address also.  */
6785   /* ??? This is required by the Digital stack unwind specification
6786      and isn't needed if we're doing Dwarf2 unwinding.  */
6787   if (imask || fmask || alpha_ra_ever_killed ())
6788     imask |= (1L << REG_RA);
6789 
6790   *imaskP = imask;
6791   *fmaskP = fmask;
6792 }
6793 
6794 int
6795 alpha_sa_size ()
6796 {
6797   unsigned long mask[2];
6798   int sa_size = 0;
6799   int i, j;
6800 
6801   alpha_sa_mask (&mask[0], &mask[1]);
6802 
6803   if (TARGET_ABI_UNICOSMK)
6804     {
6805       if (mask[0] || mask[1])
6806 	sa_size = 14;
6807     }
6808   else
6809     {
6810       for (j = 0; j < 2; ++j)
6811 	for (i = 0; i < 32; ++i)
6812 	  if ((mask[j] >> i) & 1)
6813 	    sa_size++;
6814     }
6815 
6816   if (TARGET_ABI_UNICOSMK)
6817     {
6818       /* We might not need to generate a frame if we don't make any calls
6819 	 (including calls to __T3E_MISMATCH if this is a vararg function),
6820 	 don't have any local variables which require stack slots, don't
6821 	 use alloca and have not determined that we need a frame for other
6822 	 reasons.  */
6823 
6824       alpha_procedure_type
6825 	= (sa_size || get_frame_size() != 0
6826 	   || current_function_outgoing_args_size
6827 	   || current_function_stdarg || current_function_calls_alloca
6828 	   || frame_pointer_needed)
6829 	  ? PT_STACK : PT_REGISTER;
6830 
6831       /* Always reserve space for saving callee-saved registers if we
6832 	 need a frame as required by the calling convention.  */
6833       if (alpha_procedure_type == PT_STACK)
6834         sa_size = 14;
6835     }
6836   else if (TARGET_ABI_OPEN_VMS)
6837     {
6838       /* Start by assuming we can use a register procedure if we don't
6839 	 make any calls (REG_RA not used) or need to save any
6840 	 registers and a stack procedure if we do.  */
6841       if ((mask[0] >> REG_RA) & 1)
6842 	alpha_procedure_type = PT_STACK;
6843       else if (get_frame_size() != 0)
6844 	alpha_procedure_type = PT_REGISTER;
6845       else
6846 	alpha_procedure_type = PT_NULL;
6847 
6848       /* Don't reserve space for saving FP & RA yet.  Do that later after we've
6849 	 made the final decision on stack procedure vs register procedure.  */
6850       if (alpha_procedure_type == PT_STACK)
6851 	sa_size -= 2;
6852 
6853       /* Decide whether to refer to objects off our PV via FP or PV.
6854 	 If we need FP for something else or if we receive a nonlocal
6855 	 goto (which expects PV to contain the value), we must use PV.
6856 	 Otherwise, start by assuming we can use FP.  */
6857 
6858       vms_base_regno
6859 	= (frame_pointer_needed
6860 	   || current_function_has_nonlocal_label
6861 	   || alpha_procedure_type == PT_STACK
6862 	   || current_function_outgoing_args_size)
6863 	  ? REG_PV : HARD_FRAME_POINTER_REGNUM;
6864 
6865       /* If we want to copy PV into FP, we need to find some register
6866 	 in which to save FP.  */
6867 
6868       vms_save_fp_regno = -1;
6869       if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
6870 	for (i = 0; i < 32; i++)
6871 	  if (! fixed_regs[i] && call_used_regs[i] && ! regs_ever_live[i])
6872 	    vms_save_fp_regno = i;
6873 
6874       if (vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)
6875 	vms_base_regno = REG_PV, alpha_procedure_type = PT_STACK;
6876       else if (alpha_procedure_type == PT_NULL)
6877 	vms_base_regno = REG_PV;
6878 
6879       /* Stack unwinding should be done via FP unless we use it for PV.  */
6880       vms_unwind_regno = (vms_base_regno == REG_PV
6881 			  ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
6882 
6883       /* If this is a stack procedure, allow space for saving FP and RA.  */
6884       if (alpha_procedure_type == PT_STACK)
6885 	sa_size += 2;
6886     }
6887   else
6888     {
6889       /* Our size must be even (multiple of 16 bytes).  */
6890       if (sa_size & 1)
6891 	sa_size++;
6892     }
6893 
6894   return sa_size * 8;
6895 }
6896 
6897 int
6898 alpha_pv_save_size ()
6899 {
6900   alpha_sa_size ();
6901   return alpha_procedure_type == PT_STACK ? 8 : 0;
6902 }
6903 
6904 int
6905 alpha_using_fp ()
6906 {
6907   alpha_sa_size ();
6908   return vms_unwind_regno == HARD_FRAME_POINTER_REGNUM;
6909 }
6910 
6911 #if TARGET_ABI_OPEN_VMS
6912 
6913 const struct attribute_spec vms_attribute_table[] =
6914 {
6915   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
6916   { "overlaid",   0, 0, true,  false, false, NULL },
6917   { "global",     0, 0, true,  false, false, NULL },
6918   { "initialize", 0, 0, true,  false, false, NULL },
6919   { NULL,         0, 0, false, false, false, NULL }
6920 };
6921 
6922 #endif
6923 
6924 static int
6925 find_lo_sum_using_gp (px, data)
6926      rtx *px;
6927      void *data ATTRIBUTE_UNUSED;
6928 {
6929   return GET_CODE (*px) == LO_SUM && XEXP (*px, 0) == pic_offset_table_rtx;
6930 }
6931 
6932 int
6933 alpha_find_lo_sum_using_gp (insn)
6934      rtx insn;
6935 {
6936   return for_each_rtx (&PATTERN (insn), find_lo_sum_using_gp, NULL) > 0;
6937 }
6938 
6939 static int
6940 alpha_does_function_need_gp ()
6941 {
6942   rtx insn;
6943 
6944   /* The GP being variable is an OSF abi thing.  */
6945   if (! TARGET_ABI_OSF)
6946     return 0;
6947 
6948   if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
6949     return 1;
6950 
6951   if (current_function_is_thunk)
6952     return 1;
6953 
6954   /* If we need a GP (we have a LDSYM insn or a CALL_INSN), load it first.
6955      Even if we are a static function, we still need to do this in case
6956      our address is taken and passed to something like qsort.  */
6957 
6958   push_topmost_sequence ();
6959   insn = get_insns ();
6960   pop_topmost_sequence ();
6961 
6962   for (; insn; insn = NEXT_INSN (insn))
6963     if (INSN_P (insn)
6964 	&& GET_CODE (PATTERN (insn)) != USE
6965 	&& GET_CODE (PATTERN (insn)) != CLOBBER
6966 	&& get_attr_usegp (insn))
6967       return 1;
6968 
6969   return 0;
6970 }
6971 
6972 /* Write a version stamp.  Don't write anything if we are running as a
6973    cross-compiler.  Otherwise, use the versions in /usr/include/stamp.h.  */
6974 
6975 #ifdef HAVE_STAMP_H
6976 #include <stamp.h>
6977 #endif
6978 
6979 void
6980 alpha_write_verstamp (file)
6981      FILE *file ATTRIBUTE_UNUSED;
6982 {
6983 #ifdef MS_STAMP
6984   fprintf (file, "\t.verstamp %d %d\n", MS_STAMP, LS_STAMP);
6985 #endif
6986 }
6987 
6988 /* Helper function to set RTX_FRAME_RELATED_P on instructions, including
6989    sequences.  */
6990 
6991 static rtx
6992 set_frame_related_p ()
6993 {
6994   rtx seq = get_insns ();
6995   rtx insn;
6996 
6997   end_sequence ();
6998 
6999   if (!seq)
7000     return NULL_RTX;
7001 
7002   if (INSN_P (seq))
7003     {
7004       insn = seq;
7005       while (insn != NULL_RTX)
7006 	{
7007 	  RTX_FRAME_RELATED_P (insn) = 1;
7008 	  insn = NEXT_INSN (insn);
7009 	}
7010       seq = emit_insn (seq);
7011     }
7012   else
7013     {
7014       seq = emit_insn (seq);
7015       RTX_FRAME_RELATED_P (seq) = 1;
7016     }
7017   return seq;
7018 }
7019 
7020 #define FRP(exp)  (start_sequence (), exp, set_frame_related_p ())
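/* FRP wraps an insn-emitting expression in its own sequence so every
   insn it generates is marked RTX_FRAME_RELATED_P, e.g.
   FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, ...))).  */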
7021 
7022 /* Write function prologue.  */
7023 
7024 /* On vms we have two kinds of functions:
7025 
7026    - stack frame (PT_STACK)
7027 	these are 'normal' functions with local vars and which are
7028 	calling other functions
7029    - register frame (PT_REGISTER)
7030 	keeps all data in registers, needs no stack
7031 
7032    We must pass this to the assembler so it can generate the
7033    proper pdsc (procedure descriptor)
7034    This is done with the '.pdesc' command.
7035 
7036    On non-VMS targets, we don't really differentiate between the two,
7037    as we can simply allocate stack without saving registers.  */
7038 
7039 void
7040 alpha_expand_prologue ()
7041 {
7042   /* Registers to save.  */
7043   unsigned long imask = 0;
7044   unsigned long fmask = 0;
7045   /* Stack space needed for pushing registers clobbered by us.  */
7046   HOST_WIDE_INT sa_size;
7047   /* Complete stack size needed.  */
7048   HOST_WIDE_INT frame_size;
7049   /* Offset from base reg to register save area.  */
7050   HOST_WIDE_INT reg_offset;
7051   rtx sa_reg, mem;
7052   int i;
7053 
7054   sa_size = alpha_sa_size ();
7055 
7056   frame_size = get_frame_size ();
7057   if (TARGET_ABI_OPEN_VMS)
7058     frame_size = ALPHA_ROUND (sa_size
7059 			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
7060 			      + frame_size
7061 			      + current_function_pretend_args_size);
7062   else if (TARGET_ABI_UNICOSMK)
7063     /* We have to allocate space for the DSIB if we generate a frame.  */
7064     frame_size = ALPHA_ROUND (sa_size
7065 			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
7066 		 + ALPHA_ROUND (frame_size
7067 				+ current_function_outgoing_args_size);
7068   else
7069     frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
7070 		  + sa_size
7071 		  + ALPHA_ROUND (frame_size
7072 				 + current_function_pretend_args_size));
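
  /* Worked example for the OSF case above (assuming ALPHA_ROUND rounds
     up to a 16-byte boundary): 40 bytes of outgoing args, sa_size 24,
     a 20-byte local frame and no pretend args give

	frame_size = ALPHA_ROUND (40) + 24 + ALPHA_ROUND (20)
		   = 48 + 24 + 32 = 104.  */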
7073 
7074   if (warn_stack_larger_than && frame_size > stack_larger_than_size)
7075     warning ("stack usage is %d bytes", frame_size);
7076 
7077   if (TARGET_ABI_OPEN_VMS)
7078     reg_offset = 8;
7079   else
7080     reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);
7081 
7082   alpha_sa_mask (&imask, &fmask);
7083 
7084   /* Emit an insn to reload GP, if needed.  */
7085   if (TARGET_ABI_OSF)
7086     {
7087       alpha_function_needs_gp = alpha_does_function_need_gp ();
7088       if (alpha_function_needs_gp)
7089 	emit_insn (gen_prologue_ldgp ());
7090     }
7091 
7092   /* TARGET_PROFILING_NEEDS_GP actually implies that we need to insert
7093      the call to mcount ourselves, rather than having the linker do it
7094      magically in response to -pg.  Since _mcount has special linkage,
7095      don't represent the call as a call.  */
7096   if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
7097     emit_insn (gen_prologue_mcount ());
7098 
7099   if (TARGET_ABI_UNICOSMK)
7100     unicosmk_gen_dsib (&imask);
7101 
7102   /* Adjust the stack by the frame size.  If the frame size is > 4096
7103      bytes, we need to be sure we probe somewhere in the first and last
7104      4096 bytes (we can probably get away without the latter test) and
7105      every 8192 bytes in between.  If the frame size is > 32768, we
7106      do this in a loop.  Otherwise, we generate the explicit probe
7107      instructions.
7108 
7109      Note that we are only allowed to adjust sp once in the prologue.  */
7110 
7111   if (flag_stack_check || STACK_CHECK_BUILTIN)
7112     {
7113       if (frame_size <= 32768)
7114 	{
7115 	  if (frame_size > 4096)
7116 	    {
7117 	      int probed = 4096;
7118 
7119 	      do
7120 		emit_insn (gen_probe_stack (GEN_INT (TARGET_ABI_UNICOSMK
7121 						     ? -probed + 64
7122 						     : -probed)));
7123 	      while ((probed += 8192) < frame_size);
7124 
7125 	      /* We only have to do this probe if we aren't saving
7126 		 registers.  */
7127 	      if (sa_size == 0 && probed + 4096 < frame_size)
7128 		emit_insn (gen_probe_stack (GEN_INT (-frame_size)));
7129 	    }
7130 
7131 	  if (frame_size != 0)
7132 	    FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
7133 					GEN_INT (TARGET_ABI_UNICOSMK
7134 						 ? -frame_size + 64
7135 						 : -frame_size))));
7136 	}
7137       else
7138 	{
7139 	  /* Here we generate code to set R22 to SP + 4096 and set R23 to the
7140 	     number of 8192 byte blocks to probe.  We then probe each block
7141 	     in the loop and then set SP to the proper location.  If the
7142 	     amount remaining is > 4096, we have to do one more probe if we
7143 	     are not saving any registers.  */
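
	  /* Worked example: frame_size == 40000 gives
	       blocks   = (40000 + 4096) / 8192 = 5
	       leftover = 44096 - 5 * 8192     = 3136
	     i.e. five probes 8192 bytes apart, then SP drops the
	     remaining 3136 bytes (no extra probe: 3136 <= 4096).  */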
7144 
7145 	  HOST_WIDE_INT blocks = (frame_size + 4096) / 8192;
7146 	  HOST_WIDE_INT leftover = frame_size + 4096 - blocks * 8192;
7147 	  rtx ptr = gen_rtx_REG (DImode, 22);
7148 	  rtx count = gen_rtx_REG (DImode, 23);
7149 	  rtx seq;
7150 
7151 	  emit_move_insn (count, GEN_INT (blocks));
7152 	  emit_insn (gen_adddi3 (ptr, stack_pointer_rtx,
7153 				 GEN_INT (TARGET_ABI_UNICOSMK
7154 					  ? 4096 - 64 : 4096)));
7155 
7156 	  /* Because of the difficulty in emitting a new basic block this
7157 	     late in the compilation, generate the loop as a single insn.  */
7158 	  emit_insn (gen_prologue_stack_probe_loop (count, ptr));
7159 
7160 	  if (leftover > 4096 && sa_size == 0)
7161 	    {
7162 	      rtx last = gen_rtx_MEM (DImode, plus_constant (ptr, -leftover));
7163 	      MEM_VOLATILE_P (last) = 1;
7164 	      emit_move_insn (last, const0_rtx);
7165 	    }
7166 
7167 	  if (TARGET_ABI_WINDOWS_NT)
7168 	    {
7169 	      /* For NT stack unwind (done by 'reverse execution'), it's
7170 		 not OK to take the result of a loop, even though the value
7171 		 is already in ptr, so we reload it via a single operation
7172 		 and subtract it from sp.
7173 
7174 		 Yes, that's correct -- we have to reload the whole constant
7175 		 into a temporary via ldah+lda then subtract from sp.  To
7176 		 ensure we get ldah+lda, we use a special pattern.  */
7177 
7178 	      HOST_WIDE_INT lo, hi;
7179 	      lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
7180 	      hi = frame_size - lo;
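
	      /* E.g. frame_size == 0x18000 (illustrative):
		   lo = ((0x8000 ^ 0x8000) - 0x8000) = -32768
		   hi = 0x18000 - (-32768)           = 0x20000
		 so ldah materializes 0x20000 and lda adds -32768,
		 reconstructing 0x18000 exactly.  */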
7181 
7182 	      emit_move_insn (ptr, GEN_INT (hi));
7183 	      emit_insn (gen_nt_lda (ptr, GEN_INT (lo)));
7184 	      seq = emit_insn (gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx,
7185 					   ptr));
7186 	    }
7187 	  else
7188 	    {
7189 	      seq = emit_insn (gen_adddi3 (stack_pointer_rtx, ptr,
7190 					   GEN_INT (-leftover)));
7191 	    }
7192 
7193 	  /* This alternative is special, because the DWARF code cannot
7194 	     possibly intuit through the loop above.  So we invent this
7195 	     note for it to look at instead.  */
7196 	  RTX_FRAME_RELATED_P (seq) = 1;
7197 	  REG_NOTES (seq)
7198 	    = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7199 				 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7200 				   gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7201 						 GEN_INT (TARGET_ABI_UNICOSMK
7202 							  ? -frame_size + 64
7203 							  : -frame_size))),
7204 				 REG_NOTES (seq));
7205 	}
7206     }
7207   else
7208     {
7209       if (frame_size <= 32768)
7210 	{
7211 	  if (frame_size != 0)
7212 	    FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
7213 					GEN_INT (TARGET_ABI_UNICOSMK
7214 						 ? -frame_size + 64
7215 						 : -frame_size))));
7216 	}
7217       else
7218 	{
7219 	  rtx count = gen_rtx_REG (DImode, 23);
7220 	  rtx seq;
7221 
7222 	  emit_move_insn (count, GEN_INT (TARGET_ABI_UNICOSMK
7223 					  ? -frame_size + 64
7224 					  : -frame_size));
7225 	  seq = emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
7226 				       count));
7227 
7228 	  /* This alternative is special, because the DWARF code cannot
7229 	     possibly intuit the adjustment through the temporary register.
7230 	     So we invent this note for it to look at instead.  */
7231 	  RTX_FRAME_RELATED_P (seq) = 1;
7232 	  REG_NOTES (seq)
7233 	    = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7234 				 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7235 				   gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7236 						 GEN_INT (TARGET_ABI_UNICOSMK
7237 							  ? -frame_size + 64
7238 							  : -frame_size))),
7239 				 REG_NOTES (seq));
7240 	}
7241     }
7242 
7243   if (!TARGET_ABI_UNICOSMK)
7244     {
7245       /* Cope with very large offsets to the register save area.  */
7246       sa_reg = stack_pointer_rtx;
7247       if (reg_offset + sa_size > 0x8000)
7248 	{
7249 	  int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
7250 	  HOST_WIDE_INT bias;
7251 
7252 	  if (low + sa_size <= 0x8000)
7253 	    bias = reg_offset - low, reg_offset = low;
7254 	  else
7255 	    bias = reg_offset, reg_offset = 0;
7256 
7257 	  sa_reg = gen_rtx_REG (DImode, 24);
7258 	  FRP (emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx,
7259 				      GEN_INT (bias))));
7260 	}
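
      /* E.g. reg_offset == 0x9000 with a small sa_size (illustrative):
	   low  = ((0x9000 & 0xffff) ^ 0x8000) - 0x8000 = -0x7000
	 low + sa_size fits in 16 bits, so bias becomes 0x10000; $24
	 holds sp + 0x10000 and the first save lands at $24 - 0x7000,
	 which is the original sp + 0x9000.  */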
7261 
7262       /* Save regs in stack order, beginning with the VMS PV.  */
7263       if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
7264 	{
7265 	  mem = gen_rtx_MEM (DImode, stack_pointer_rtx);
7266 	  set_mem_alias_set (mem, alpha_sr_alias_set);
7267 	  FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_PV)));
7268 	}
7269 
7270       /* Save register RA next.  */
7271       if (imask & (1L << REG_RA))
7272 	{
7273 	  mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7274 	  set_mem_alias_set (mem, alpha_sr_alias_set);
7275 	  FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
7276 	  imask &= ~(1L << REG_RA);
7277 	  reg_offset += 8;
7278 	}
7279 
7280       /* Now save any other registers required to be saved.  */
7281       for (i = 0; i < 31; i++)
7282 	if (imask & (1L << i))
7283 	  {
7284 	    mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7285 	    set_mem_alias_set (mem, alpha_sr_alias_set);
7286 	    FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
7287 	    reg_offset += 8;
7288 	  }
7289 
7290       for (i = 0; i < 31; i++)
7291 	if (fmask & (1L << i))
7292 	  {
7293 	    mem = gen_rtx_MEM (DFmode, plus_constant (sa_reg, reg_offset));
7294 	    set_mem_alias_set (mem, alpha_sr_alias_set);
7295 	    FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
7296 	    reg_offset += 8;
7297 	  }
7298     }
7299   else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
7300     {
7301       /* The standard frame on the T3E includes space for saving registers.
7302 	 We just have to use it. We don't have to save the return address and
7303 	 the old frame pointer here - they are saved in the DSIB.  */
7304 
7305       reg_offset = -56;
7306       for (i = 9; i < 15; i++)
7307 	if (imask & (1L << i))
7308 	  {
7309 	    mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
7310 						     reg_offset));
7311 	    set_mem_alias_set (mem, alpha_sr_alias_set);
7312 	    FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
7313 	    reg_offset -= 8;
7314 	  }
7315       for (i = 2; i < 10; i++)
7316 	if (fmask & (1L << i))
7317 	  {
7318 	    mem = gen_rtx_MEM (DFmode, plus_constant (hard_frame_pointer_rtx,
7319 						      reg_offset));
7320 	    set_mem_alias_set (mem, alpha_sr_alias_set);
7321 	    FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
7322 	    reg_offset -= 8;
7323 	  }
7324     }
7325 
7326   if (TARGET_ABI_OPEN_VMS)
7327     {
7328       if (alpha_procedure_type == PT_REGISTER)
7329 	/* Register frame procedures save the fp.
7330 	   ?? Ought to have a dwarf2 save for this.  */
7331 	emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno),
7332 			hard_frame_pointer_rtx);
7333 
7334       if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV)
7335 	emit_insn (gen_force_movdi (gen_rtx_REG (DImode, vms_base_regno),
7336 				    gen_rtx_REG (DImode, REG_PV)));
7337 
7338       if (alpha_procedure_type != PT_NULL
7339 	  && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
7340 	FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
7341 
7342       /* If we have to allocate space for outgoing args, do it now.  */
7343       if (current_function_outgoing_args_size != 0)
7344 	FRP (emit_move_insn
7345 	     (stack_pointer_rtx,
7346 	      plus_constant (hard_frame_pointer_rtx,
7347 			     - (ALPHA_ROUND
7348 				(current_function_outgoing_args_size)))));
7349     }
7350   else if (!TARGET_ABI_UNICOSMK)
7351     {
7352       /* If we need a frame pointer, set it from the stack pointer.  */
7353       if (frame_pointer_needed)
7354 	{
7355 	  if (TARGET_CAN_FAULT_IN_PROLOGUE)
7356 	    FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
7357 	  else
7358 	    /* This must always be the last instruction in the
7359 	       prologue, thus we emit a special move + clobber.  */
7360 	      FRP (emit_insn (gen_init_fp (hard_frame_pointer_rtx,
7361 				           stack_pointer_rtx, sa_reg)));
7362 	}
7363     }
7364 
7365   /* The ABIs for VMS and OSF/1 say that while we can schedule insns into
7366      the prologue, for exception handling reasons, we cannot do this for
7367      any insn that might fault.  We could prevent this for mems with a
7368      (clobber:BLK (scratch)), but this doesn't work for fp insns.  So we
7369      have to prevent all such scheduling with a blockage.
7370 
7371      Linux, on the other hand, never bothered to implement OSF/1's
7372      exception handling, and so doesn't care about such things.  Anyone
7373      planning to use dwarf2 frame-unwind info can also omit the blockage.  */
7374 
7375   if (! TARGET_CAN_FAULT_IN_PROLOGUE)
7376     emit_insn (gen_blockage ());
7377 }
7378 
7379 /* Output the textual info surrounding the prologue.  */
7380 
7381 void
7382 alpha_start_function (file, fnname, decl)
7383      FILE *file;
7384      const char *fnname;
7385      tree decl ATTRIBUTE_UNUSED;
7386 {
7387   unsigned long imask = 0;
7388   unsigned long fmask = 0;
7389   /* Stack space needed for pushing registers clobbered by us.  */
7390   HOST_WIDE_INT sa_size;
7391   /* Complete stack size needed.  */
7392   HOST_WIDE_INT frame_size;
7393   /* Offset from base reg to register save area.  */
7394   HOST_WIDE_INT reg_offset;
7395   char *entry_label = (char *) alloca (strlen (fnname) + 6);
7396   int i;
7397 
7398   /* Don't emit an extern directive for functions defined in the same file.  */
7399   if (TARGET_ABI_UNICOSMK)
7400     {
7401       tree name_tree;
7402       name_tree = get_identifier (fnname);
7403       TREE_ASM_WRITTEN (name_tree) = 1;
7404     }
7405 
7406   alpha_fnname = fnname;
7407   sa_size = alpha_sa_size ();
7408 
7409   frame_size = get_frame_size ();
7410   if (TARGET_ABI_OPEN_VMS)
7411     frame_size = ALPHA_ROUND (sa_size
7412 			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
7413 			      + frame_size
7414 			      + current_function_pretend_args_size);
7415   else if (TARGET_ABI_UNICOSMK)
7416     frame_size = ALPHA_ROUND (sa_size
7417 			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
7418 		 + ALPHA_ROUND (frame_size
7419 			      + current_function_outgoing_args_size);
7420   else
7421     frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
7422 		  + sa_size
7423 		  + ALPHA_ROUND (frame_size
7424 				 + current_function_pretend_args_size));
7425 
7426   if (TARGET_ABI_OPEN_VMS)
7427     reg_offset = 8;
7428   else
7429     reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);
7430 
7431   alpha_sa_mask (&imask, &fmask);
7432 
7433   /* Ecoff can handle multiple .file directives, so put out file and lineno.
7434      We have to do that before the .ent directive as we cannot switch
7435      files within procedures with native ecoff because line numbers are
7436      linked to procedure descriptors.
7437      Outputting the lineno helps debugging of one-line functions as they
7438      would otherwise get no line number at all. Please note that we would
7439      like to put out last_linenum from final.c, but it is not accessible.  */
7440 
7441   if (write_symbols == SDB_DEBUG)
7442     {
7443 #ifdef ASM_OUTPUT_SOURCE_FILENAME
7444       ASM_OUTPUT_SOURCE_FILENAME (file,
7445 				  DECL_SOURCE_FILE (current_function_decl));
7446 #endif
7447 #ifdef ASM_OUTPUT_SOURCE_LINE
7448       if (debug_info_level != DINFO_LEVEL_TERSE)
7449         ASM_OUTPUT_SOURCE_LINE (file,
7450 				DECL_SOURCE_LINE (current_function_decl));
7451 #endif
7452     }
7453 
7454   /* Issue function start and label.  */
7455   if (TARGET_ABI_OPEN_VMS
7456       || (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive))
7457     {
7458       fputs ("\t.ent ", file);
7459       assemble_name (file, fnname);
7460       putc ('\n', file);
7461 
7462       /* If the function needs GP, we'll write the "..ng" label there.
7463 	 Otherwise, do it here.  */
7464       if (TARGET_ABI_OSF
7465           && ! alpha_function_needs_gp
7466 	  && ! current_function_is_thunk)
7467 	{
7468 	  putc ('$', file);
7469 	  assemble_name (file, fnname);
7470 	  fputs ("..ng:\n", file);
7471 	}
7472     }
7473 
7474   strcpy (entry_label, fnname);
7475   if (TARGET_ABI_OPEN_VMS)
7476     strcat (entry_label, "..en");
7477 
7478   /* For public functions, the label must be globalized by appending an
7479      additional colon.  */
7480   if (TARGET_ABI_UNICOSMK && TREE_PUBLIC (decl))
7481     strcat (entry_label, ":");
7482 
7483   ASM_OUTPUT_LABEL (file, entry_label);
7484   inside_function = TRUE;
7485 
7486   if (TARGET_ABI_OPEN_VMS)
7487     fprintf (file, "\t.base $%d\n", vms_base_regno);
7488 
7489   if (!TARGET_ABI_OPEN_VMS && !TARGET_ABI_UNICOSMK && TARGET_IEEE_CONFORMANT
7490       && !flag_inhibit_size_directive)
7491     {
7492       /* Set flags in procedure descriptor to request IEEE-conformant
7493 	 math-library routines.  The value we set it to is PDSC_EXC_IEEE
7494 	 (/usr/include/pdsc.h).  */
7495       fputs ("\t.eflag 48\n", file);
7496     }
7497 
7498   /* Set up offsets to alpha virtual arg/local debugging pointer.  */
7499   alpha_auto_offset = -frame_size + current_function_pretend_args_size;
7500   alpha_arg_offset = -frame_size + 48;
7501 
7502   /* Describe our frame.  If the frame size overflows a 32-bit integer,
7503      print it as zero to avoid an assembler error.  We won't be
7504      properly describing such a frame, but that's the best we can do.  */
7505   if (TARGET_ABI_UNICOSMK)
7506     ;
7507   else if (TARGET_ABI_OPEN_VMS)
7508     {
7509       fprintf (file, "\t.frame $%d,", vms_unwind_regno);
7510       fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7511 	       frame_size >= ((HOST_WIDE_INT) 1 << 31) ? 0 : frame_size);
7512       fputs (",$26,", file);
7513       fprintf (file, HOST_WIDE_INT_PRINT_DEC, reg_offset);
7514       fputs ("\n", file);
7515     }
7516   else if (!flag_inhibit_size_directive)
7517     {
7518       fprintf (file, "\t.frame $%d,",
7519 	       (frame_pointer_needed
7520 		? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM));
7521       fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7522 	       frame_size >= (1l << 31) ? 0 : frame_size);
7523       fprintf (file, ",$26,%d\n", current_function_pretend_args_size);
7524     }
7525 
7526   /* Describe which registers were spilled.  */
7527   if (TARGET_ABI_UNICOSMK)
7528     ;
7529   else if (TARGET_ABI_OPEN_VMS)
7530     {
7531       if (imask)
7532         /* ??? Does VMS care if mask contains ra?  The old code didn't
7533            set it, so I don't here.  */
7534 	fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1L << REG_RA));
7535       if (fmask)
7536 	fprintf (file, "\t.fmask 0x%lx,0\n", fmask);
7537       if (alpha_procedure_type == PT_REGISTER)
7538 	fprintf (file, "\t.fp_save $%d\n", vms_save_fp_regno);
7539     }
7540   else if (!flag_inhibit_size_directive)
7541     {
7542       if (imask)
7543 	{
7544 	  fprintf (file, "\t.mask 0x%lx,", imask);
7545 	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7546 		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
7547 	  putc ('\n', file);
7548 
7549 	  for (i = 0; i < 32; ++i)
7550 	    if (imask & (1L << i))
7551 	      reg_offset += 8;
7552 	}
7553 
7554       if (fmask)
7555 	{
7556 	  fprintf (file, "\t.fmask 0x%lx,", fmask);
7557 	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7558 		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
7559 	  putc ('\n', file);
7560 	}
7561     }
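
  /* E.g. (illustrative): a non-VMS function saving $9 and $26 with a
     4096-byte frame, no frame pointer and no outgoing args would emit

	.frame $30,4096,$26,0
	.mask 0x04000200,-4096

     where 0x04000200 == (1 << 26) | (1 << 9).  */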
7562 
7563 #if TARGET_ABI_OPEN_VMS
7564   /* Ifdef'ed because link_section is only available there.  */
7565   readonly_data_section ();
7566   fprintf (file, "\t.align 3\n");
7567   assemble_name (file, fnname); fputs ("..na:\n", file);
7568   fputs ("\t.ascii \"", file);
7569   assemble_name (file, fnname);
7570   fputs ("\\0\"\n", file);
7571   alpha_need_linkage (fnname, 1);
7572   text_section ();
7573 #endif
7574 }
7575 
7576 /* Emit the .prologue note at the scheduled end of the prologue.  */
7577 
7578 static void
7579 alpha_output_function_end_prologue (file)
7580      FILE *file;
7581 {
7582   if (TARGET_ABI_UNICOSMK)
7583     ;
7584   else if (TARGET_ABI_OPEN_VMS)
7585     fputs ("\t.prologue\n", file);
7586   else if (TARGET_ABI_WINDOWS_NT)
7587     fputs ("\t.prologue 0\n", file);
7588   else if (!flag_inhibit_size_directive)
7589     fprintf (file, "\t.prologue %d\n",
7590 	     alpha_function_needs_gp || current_function_is_thunk);
7591 }
7592 
7593 /* Write function epilogue.  */
7594 
7595 /* ??? At some point we will want to support full unwind, and so will
7596    need to mark the epilogue as well.  At the moment, we just confuse
7597    dwarf2out.  */
7598 #undef FRP
7599 #define FRP(exp) exp
7600 
7601 void
7602 alpha_expand_epilogue ()
7603 {
7604   /* Registers to save.  */
7605   unsigned long imask = 0;
7606   unsigned long fmask = 0;
7607   /* Stack space needed for pushing registers clobbered by us.  */
7608   HOST_WIDE_INT sa_size;
7609   /* Complete stack size needed.  */
7610   HOST_WIDE_INT frame_size;
7611   /* Offset from base reg to register save area.  */
7612   HOST_WIDE_INT reg_offset;
7613   int fp_is_frame_pointer, fp_offset;
7614   rtx sa_reg, sa_reg_exp = NULL;
7615   rtx sp_adj1, sp_adj2, mem;
7616   rtx eh_ofs;
7617   int i;
7618 
7619   sa_size = alpha_sa_size ();
7620 
7621   frame_size = get_frame_size ();
7622   if (TARGET_ABI_OPEN_VMS)
7623     frame_size = ALPHA_ROUND (sa_size
7624 			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
7625 			      + frame_size
7626 			      + current_function_pretend_args_size);
7627   else if (TARGET_ABI_UNICOSMK)
7628     frame_size = ALPHA_ROUND (sa_size
7629 			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
7630 		 + ALPHA_ROUND (frame_size
7631 			      + current_function_outgoing_args_size);
7632   else
7633     frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
7634 		  + sa_size
7635 		  + ALPHA_ROUND (frame_size
7636 				 + current_function_pretend_args_size));
7637 
7638   if (TARGET_ABI_OPEN_VMS)
7639     {
7640        if (alpha_procedure_type == PT_STACK)
7641           reg_offset = 8;
7642        else
7643           reg_offset = 0;
7644     }
7645   else
7646     reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);
7647 
7648   alpha_sa_mask (&imask, &fmask);
7649 
7650   fp_is_frame_pointer
7651     = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
7652        || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed));
7653   fp_offset = 0;
7654   sa_reg = stack_pointer_rtx;
7655 
7656   if (current_function_calls_eh_return)
7657     eh_ofs = EH_RETURN_STACKADJ_RTX;
7658   else
7659     eh_ofs = NULL_RTX;
7660 
7661   if (!TARGET_ABI_UNICOSMK && sa_size)
7662     {
7663       /* If we have a frame pointer, restore SP from it.  */
7664       if ((TARGET_ABI_OPEN_VMS
7665 	   && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
7666 	  || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed))
7667 	FRP (emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx));
7668 
7669       /* Cope with very large offsets to the register save area.  */
7670       if (reg_offset + sa_size > 0x8000)
7671 	{
7672 	  int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
7673 	  HOST_WIDE_INT bias;
7674 
7675 	  if (low + sa_size <= 0x8000)
7676 	    bias = reg_offset - low, reg_offset = low;
7677 	  else
7678 	    bias = reg_offset, reg_offset = 0;
7679 
7680 	  sa_reg = gen_rtx_REG (DImode, 22);
7681 	  sa_reg_exp = plus_constant (stack_pointer_rtx, bias);
7682 
7683 	  FRP (emit_move_insn (sa_reg, sa_reg_exp));
7684 	}
7685 
7686       /* Restore registers in order, excepting a true frame pointer.  */
7687 
7688       mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7689       if (! eh_ofs)
7690         set_mem_alias_set (mem, alpha_sr_alias_set);
7691       FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));
7692 
7693       reg_offset += 8;
7694       imask &= ~(1L << REG_RA);
7695 
7696       for (i = 0; i < 31; ++i)
7697 	if (imask & (1L << i))
7698 	  {
7699 	    if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer)
7700 	      fp_offset = reg_offset;
7701 	    else
7702 	      {
7703 		mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset));
7704 		set_mem_alias_set (mem, alpha_sr_alias_set);
7705 		FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
7706 	      }
7707 	    reg_offset += 8;
7708 	  }
7709 
7710       for (i = 0; i < 31; ++i)
7711 	if (fmask & (1L << i))
7712 	  {
7713 	    mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset));
7714 	    set_mem_alias_set (mem, alpha_sr_alias_set);
7715 	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
7716 	    reg_offset += 8;
7717 	  }
7718     }
7719   else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
7720     {
7721       /* Restore callee-saved general-purpose registers.  */
7722 
7723       reg_offset = -56;
7724 
7725       for (i = 9; i < 15; i++)
7726 	if (imask & (1L << i))
7727 	  {
7728 	    mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
7729 						     reg_offset));
7730 	    set_mem_alias_set (mem, alpha_sr_alias_set);
7731 	    FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
7732 	    reg_offset -= 8;
7733 	  }
7734 
7735       for (i = 2; i < 10; i++)
7736 	if (fmask & (1L << i))
7737 	  {
7738 	    mem = gen_rtx_MEM (DFmode, plus_constant(hard_frame_pointer_rtx,
7739 						     reg_offset));
7740 	    set_mem_alias_set (mem, alpha_sr_alias_set);
7741 	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
7742 	    reg_offset -= 8;
7743 	  }
7744 
7745       /* Restore the return address from the DSIB.  */
7746 
7747       mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx, -8));
7748       set_mem_alias_set (mem, alpha_sr_alias_set);
7749       FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));
7750     }
7751 
7752   if (frame_size || eh_ofs)
7753     {
7754       sp_adj1 = stack_pointer_rtx;
7755 
7756       if (eh_ofs)
7757 	{
7758 	  sp_adj1 = gen_rtx_REG (DImode, 23);
7759 	  emit_move_insn (sp_adj1,
7760 			  gen_rtx_PLUS (Pmode, stack_pointer_rtx, eh_ofs));
7761 	}
7762 
7763       /* If the stack size is large, begin computation into a temporary
7764 	 register so as not to interfere with a potential fp restore,
7765 	 which must be consecutive with an SP restore.  */
7766       if (frame_size < 32768
7767 	  && ! (TARGET_ABI_UNICOSMK && current_function_calls_alloca))
7768 	sp_adj2 = GEN_INT (frame_size);
7769       else if (TARGET_ABI_UNICOSMK)
7770 	{
7771 	  sp_adj1 = gen_rtx_REG (DImode, 23);
7772 	  FRP (emit_move_insn (sp_adj1, hard_frame_pointer_rtx));
7773 	  sp_adj2 = const0_rtx;
7774 	}
7775       else if (frame_size < 0x40007fffL)
7776 	{
7777 	  int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
7778 
7779 	  sp_adj2 = plus_constant (sp_adj1, frame_size - low);
7780 	  if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2))
7781 	    sp_adj1 = sa_reg;
7782 	  else
7783 	    {
7784 	      sp_adj1 = gen_rtx_REG (DImode, 23);
7785 	      FRP (emit_move_insn (sp_adj1, sp_adj2));
7786 	    }
7787 	  sp_adj2 = GEN_INT (low);
7788 	}
7789       else
7790 	{
7791 	  rtx tmp = gen_rtx_REG (DImode, 23);
7792 	  FRP (sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3));
7793 	  if (!sp_adj2)
7794 	    {
7795 	      /* We can't drop new constants to memory this late, as far
7796 		 as we know, so build the value up in pieces.  */
7797 	      FRP (sp_adj2 = alpha_emit_set_long_const (tmp, frame_size,
7798 							-(frame_size < 0)));
7799 	      if (!sp_adj2)
7800 		abort ();
7801 	    }
7802 	}
7803 
7804       /* From now on, things must be in order.  So emit blockages.  */
7805 
7806       /* Restore the frame pointer.  */
7807       if (TARGET_ABI_UNICOSMK)
7808 	{
7809 	  emit_insn (gen_blockage ());
7810 	  mem = gen_rtx_MEM (DImode,
7811 			     plus_constant (hard_frame_pointer_rtx, -16));
7812 	  set_mem_alias_set (mem, alpha_sr_alias_set);
7813 	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
7814 	}
7815       else if (fp_is_frame_pointer)
7816 	{
7817 	  emit_insn (gen_blockage ());
7818 	  mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
7819 	  set_mem_alias_set (mem, alpha_sr_alias_set);
7820 	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
7821 	}
7822       else if (TARGET_ABI_OPEN_VMS)
7823 	{
7824 	  emit_insn (gen_blockage ());
7825 	  FRP (emit_move_insn (hard_frame_pointer_rtx,
7826 			       gen_rtx_REG (DImode, vms_save_fp_regno)));
7827 	}
7828 
7829       /* Restore the stack pointer.  */
7830       emit_insn (gen_blockage ());
7831       if (sp_adj2 == const0_rtx)
7832 	FRP (emit_move_insn (stack_pointer_rtx, sp_adj1));
7833       else
7834 	FRP (emit_move_insn (stack_pointer_rtx,
7835 			     gen_rtx_PLUS (DImode, sp_adj1, sp_adj2)));
7836     }
7837   else
7838     {
7839       if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_REGISTER)
7840         {
7841           emit_insn (gen_blockage ());
7842           FRP (emit_move_insn (hard_frame_pointer_rtx,
7843 			       gen_rtx_REG (DImode, vms_save_fp_regno)));
7844         }
7845       else if (TARGET_ABI_UNICOSMK && alpha_procedure_type != PT_STACK)
7846 	{
7847 	  /* Decrement the frame pointer if the function does not have a
7848 	     frame.  */
7849 
7850 	  emit_insn (gen_blockage ());
7851 	  FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
7852 				      hard_frame_pointer_rtx, GEN_INT (-1))));
7853         }
7854     }
7855 }
7856 
7857 #if TARGET_ABI_OPEN_VMS
7858 #include <splay-tree.h>
7859 
7860 /* Structure to collect function names for final output
7861    in link section.  */
7862 
7863 enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN};
7864 enum reloc_kind {KIND_LINKAGE, KIND_CODEADDR};
7865 
7866 struct alpha_funcs
7867 {
7868   int num;
7869   splay_tree links;
7870 };
7871 
7872 struct alpha_links
7873 {
7874   int num;
7875   rtx linkage;
7876   enum links_kind lkind;
7877   enum reloc_kind rkind;
7878 };
7879 
7880 static splay_tree alpha_funcs_tree;
7881 static splay_tree alpha_links_tree;
7882 
7883 static int mark_alpha_links_node	PARAMS ((splay_tree_node, void *));
7884 static void mark_alpha_links		PARAMS ((void *));
7885 static int alpha_write_one_linkage	PARAMS ((splay_tree_node, void *));
7886 
7887 static int alpha_funcs_num;
7888 #endif
7889 
7890 /* Output the rest of the textual info surrounding the epilogue.  */
7891 
7892 void
7893 alpha_end_function (file, fnname, decl)
7894      FILE *file;
7895      const char *fnname;
7896      tree decl;
7897 {
7898   /* End the function.  */
7899   if (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive)
7900     {
7901       fputs ("\t.end ", file);
7902       assemble_name (file, fnname);
7903       putc ('\n', file);
7904     }
7905   inside_function = FALSE;
7906 
7907 #if TARGET_ABI_OPEN_VMS
7908   alpha_write_linkage (file, fnname, decl);
7909 #endif
7910 
7911   /* Show that we know this function if it is called again.
7912 
7913      Do this only for functions whose symbols bind locally.
7914 
7915      Don't do this for functions not defined in the .text section, as
7916      otherwise it's not unlikely that the destination is out of range
7917      for a direct branch.  */
7918 
7919   if ((*targetm.binds_local_p) (decl) && decl_in_text_section (decl))
7920     SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
7921 
7922   /* Output jump tables and the static subroutine information block.  */
7923   if (TARGET_ABI_UNICOSMK)
7924     {
7925       unicosmk_output_ssib (file, fnname);
7926       unicosmk_output_deferred_case_vectors (file);
7927     }
7928 }
7929 
7930 #if TARGET_ABI_OSF
7931 /* Emit a tail call to FUNCTION after adjusting THIS by DELTA.
7932 
7933    In order to avoid the hordes of differences between generated code
7934    with and without TARGET_EXPLICIT_RELOCS, and to avoid duplicating
7935    lots of code loading up large constants, generate rtl and emit it
7936    instead of going straight to text.
7937 
7938    Not sure why this idea hasn't been explored before...  */
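
/* For DELTA == 8 and no vcall offset, the emitted code would look
   roughly like this (illustrative only; the exact branch form and
   relocations depend on the target and distance):

	ldgp $29,0($27)
	lda $16,8($16)
	br $31,function		# sibcall to FUNCTION  */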
7939 
7940 static void
7941 alpha_output_mi_thunk_osf (file, thunk_fndecl, delta, vcall_offset, function)
7942      FILE *file;
7943      tree thunk_fndecl ATTRIBUTE_UNUSED;
7944      HOST_WIDE_INT delta;
7945      HOST_WIDE_INT vcall_offset;
7946      tree function;
7947 {
7948   HOST_WIDE_INT hi, lo;
7949   rtx this, insn, funexp;
7950 
7951   /* We always require a valid GP.  */
7952   emit_insn (gen_prologue_ldgp ());
7953   emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7954 
7955   /* Find the "this" pointer.  If the function returns a structure,
7956      the structure return pointer is in $16.  */
7957   if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
7958     this = gen_rtx_REG (Pmode, 17);
7959   else
7960     this = gen_rtx_REG (Pmode, 16);
7961 
7962   /* Add DELTA.  When possible we use ldah+lda.  Otherwise load the
7963      entire constant for the add.  */
7964   lo = ((delta & 0xffff) ^ 0x8000) - 0x8000;
7965   hi = (((delta - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
7966   if (hi + lo == delta)
7967     {
7968       if (hi)
7969 	emit_insn (gen_adddi3 (this, this, GEN_INT (hi)));
7970       if (lo)
7971 	emit_insn (gen_adddi3 (this, this, GEN_INT (lo)));
7972     }
7973   else
7974     {
7975       rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0),
7976 					   delta, -(delta < 0));
7977       emit_insn (gen_adddi3 (this, this, tmp));
7978     }
7979 
7980   /* Add a delta stored in the vtable at VCALL_OFFSET.  */
7981   if (vcall_offset)
7982     {
7983       rtx tmp, tmp2;
7984 
7985       tmp = gen_rtx_REG (Pmode, 0);
7986       emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
7987 
7988       lo = ((vcall_offset & 0xffff) ^ 0x8000) - 0x8000;
7989       hi = (((vcall_offset - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
7990       if (hi + lo == vcall_offset)
7991 	{
7992 	  if (hi)
7993 	    emit_insn (gen_adddi3 (tmp, tmp, GEN_INT (hi)));
7994 	}
7995       else
7996 	{
7997 	  tmp2 = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 1),
7998 					    vcall_offset, -(vcall_offset < 0));
7999           emit_insn (gen_adddi3 (tmp, tmp, tmp2));
8000 	  lo = 0;
8001 	}
8002       if (lo)
8003 	tmp2 = gen_rtx_PLUS (Pmode, tmp, GEN_INT (lo));
8004       else
8005 	tmp2 = tmp;
8006       emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp2));
8007 
8008       emit_insn (gen_adddi3 (this, this, tmp));
8009     }
8010 
8011   /* Generate a tail call to the target function.  */
8012   if (! TREE_USED (function))
8013     {
8014       assemble_external (function);
8015       TREE_USED (function) = 1;
8016     }
8017   funexp = XEXP (DECL_RTL (function), 0);
8018   funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
8019   insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
8020   SIBLING_CALL_P (insn) = 1;
8021 
8022   /* Run just enough of rest_of_compilation to get the insns emitted.
8023      There's not really enough bulk here to make other passes such as
8024      instruction scheduling worth while.  Note that use_thunk calls
8025      assemble_start_function and assemble_end_function.  */
8026   insn = get_insns ();
8027   shorten_branches (insn);
8028   final_start_function (insn, file, 1);
8029   final (insn, file, 1, 0);
8030   final_end_function ();
8031 }
8032 #endif /* TARGET_ABI_OSF */
8033 
8034 /* Debugging support.  */
8035 
8036 #include "gstab.h"
8037 
8038 /* Count the number of sdb-related labels generated (to find block
8039    start and end boundaries).  */
8040 
8041 int sdb_label_count = 0;
8042 
8043 /* Next label # for each statement.  */
8044 
8045 static int sym_lineno = 0;
8046 
8047 /* Count the number of .file directives, so that .loc is up to date.  */
8048 
8049 static int num_source_filenames = 0;
8050 
8051 /* Name of the file containing the current function.  */
8052 
8053 static const char *current_function_file = "";
8054 
8055 /* Offsets to alpha virtual arg/local debugging pointers.  */
8056 
8057 long alpha_arg_offset;
8058 long alpha_auto_offset;
8059 
8060 /* Emit a new filename to a stream.  */
8061 
8062 void
8063 alpha_output_filename (stream, name)
8064      FILE *stream;
8065      const char *name;
8066 {
8067   static int first_time = TRUE;
8068   char ltext_label_name[100];
8069 
8070   if (first_time)
8071     {
8072       first_time = FALSE;
8073       ++num_source_filenames;
8074       current_function_file = name;
8075       fprintf (stream, "\t.file\t%d ", num_source_filenames);
8076       output_quoted_string (stream, name);
8077       fprintf (stream, "\n");
8078       if (!TARGET_GAS && write_symbols == DBX_DEBUG)
8079 	fprintf (stream, "\t#@stabs\n");
8080     }
8081 
8082   else if (write_symbols == DBX_DEBUG)
8083     {
8084       ASM_GENERATE_INTERNAL_LABEL (ltext_label_name, "Ltext", 0);
8085       fprintf (stream, "%s", ASM_STABS_OP);
8086       output_quoted_string (stream, name);
8087       fprintf (stream, ",%d,0,0,%s\n", N_SOL, &ltext_label_name[1]);
8088     }
8089 
8090   else if (name != current_function_file
8091 	   && strcmp (name, current_function_file) != 0)
8092     {
8093       if (inside_function && ! TARGET_GAS)
8094 	fprintf (stream, "\t#.file\t%d ", num_source_filenames);
8095       else
8096 	{
8097 	  ++num_source_filenames;
8098 	  current_function_file = name;
8099 	  fprintf (stream, "\t.file\t%d ", num_source_filenames);
8100 	}
8101 
8102       output_quoted_string (stream, name);
8103       fprintf (stream, "\n");
8104     }
8105 }
8106 
8107 /* Emit a linenumber to a stream.  */
8108 
8109 void
8110 alpha_output_lineno (stream, line)
8111      FILE *stream;
8112      int line;
8113 {
8114   if (write_symbols == DBX_DEBUG)
8115     {
8116       /* mips-tfile doesn't understand .stabd directives.  */
8117       ++sym_lineno;
8118       fprintf (stream, "$LM%d:\n%s%d,0,%d,$LM%d\n",
8119 	       sym_lineno, ASM_STABN_OP, N_SLINE, line, sym_lineno);
8120     }
8121   else
8122     fprintf (stream, "\n\t.loc\t%d %d\n", num_source_filenames, line);
8123 }
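
/* E.g. (illustrative) line 42 under DBX_DEBUG comes out roughly as

	$LM1:
	.stabn 68,0,42,$LM1

   where 68 is N_SLINE; otherwise it is simply "\t.loc\t1 42".  */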
8124 
8125 /* Structure to show the current status of registers and memory.  */
8126 
8127 struct shadow_summary
8128 {
8129   struct {
8130     unsigned int i     : 31;	/* Mask of int regs */
8131     unsigned int fp    : 31;	/* Mask of fp regs */
8132     unsigned int mem   :  1;	/* mem == imem | fpmem */
8133   } used, defd;
8134 };
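
/* Note that only 31 bits are tracked per bank: $31 and $f31 always read
   as zero, so the REG case in summarize_insn below skips them.  */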
8135 
8136 static void summarize_insn PARAMS ((rtx, struct shadow_summary *, int));
8137 static void alpha_handle_trap_shadows PARAMS ((rtx));
8138 
8139 /* Summarize the effects of expression X on the machine.  Update SUM, a pointer
8140    to the summary structure.  SET is nonzero if the insn is setting the
8141    object, otherwise zero.  */
8142 
8143 static void
8144 summarize_insn (x, sum, set)
8145      rtx x;
8146      struct shadow_summary *sum;
8147      int set;
8148 {
8149   const char *format_ptr;
8150   int i, j;
8151 
8152   if (x == 0)
8153     return;
8154 
8155   switch (GET_CODE (x))
8156     {
8157       /* ??? Note that this case would be incorrect if the Alpha had a
8158 	 ZERO_EXTRACT in SET_DEST.  */
8159     case SET:
8160       summarize_insn (SET_SRC (x), sum, 0);
8161       summarize_insn (SET_DEST (x), sum, 1);
8162       break;
8163 
8164     case CLOBBER:
8165       summarize_insn (XEXP (x, 0), sum, 1);
8166       break;
8167 
8168     case USE:
8169       summarize_insn (XEXP (x, 0), sum, 0);
8170       break;
8171 
8172     case ASM_OPERANDS:
8173       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
8174 	summarize_insn (ASM_OPERANDS_INPUT (x, i), sum, 0);
8175       break;
8176 
8177     case PARALLEL:
8178       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8179 	summarize_insn (XVECEXP (x, 0, i), sum, 0);
8180       break;
8181 
8182     case SUBREG:
8183       summarize_insn (SUBREG_REG (x), sum, 0);
8184       break;
8185 
8186     case REG:
8187       {
8188 	int regno = REGNO (x);
8189 	unsigned long mask = ((unsigned long) 1) << (regno % 32);
8190 
8191 	if (regno == 31 || regno == 63)
8192 	  break;
8193 
8194 	if (set)
8195 	  {
8196 	    if (regno < 32)
8197 	      sum->defd.i |= mask;
8198 	    else
8199 	      sum->defd.fp |= mask;
8200 	  }
8201 	else
8202 	  {
8203 	    if (regno < 32)
8204 	      sum->used.i  |= mask;
8205 	    else
8206 	      sum->used.fp |= mask;
8207 	  }
8208 	}
8209       break;
8210 
8211     case MEM:
8212       if (set)
8213 	sum->defd.mem = 1;
8214       else
8215 	sum->used.mem = 1;
8216 
8217       /* Find the regs used in memory address computation: */
8218       summarize_insn (XEXP (x, 0), sum, 0);
8219       break;
8220 
8221     case CONST_INT:   case CONST_DOUBLE:
8222     case SYMBOL_REF:  case LABEL_REF:     case CONST:
8223     case SCRATCH:     case ASM_INPUT:
8224       break;
8225 
8226       /* Handle common unary and binary ops for efficiency.  */
8227     case COMPARE:  case PLUS:    case MINUS:   case MULT:      case DIV:
8228     case MOD:      case UDIV:    case UMOD:    case AND:       case IOR:
8229     case XOR:      case ASHIFT:  case ROTATE:  case ASHIFTRT:  case LSHIFTRT:
8230     case ROTATERT: case SMIN:    case SMAX:    case UMIN:      case UMAX:
8231     case NE:       case EQ:      case GE:      case GT:        case LE:
8232     case LT:       case GEU:     case GTU:     case LEU:       case LTU:
8233       summarize_insn (XEXP (x, 0), sum, 0);
8234       summarize_insn (XEXP (x, 1), sum, 0);
8235       break;
8236 
8237     case NEG:  case NOT:  case SIGN_EXTEND:  case ZERO_EXTEND:
8238     case TRUNCATE:  case FLOAT_EXTEND:  case FLOAT_TRUNCATE:  case FLOAT:
8239     case FIX:  case UNSIGNED_FLOAT:  case UNSIGNED_FIX:  case ABS:
8240     case SQRT:  case FFS:
8241       summarize_insn (XEXP (x, 0), sum, 0);
8242       break;
8243 
8244     default:
8245       format_ptr = GET_RTX_FORMAT (GET_CODE (x));
8246       for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
8247 	switch (format_ptr[i])
8248 	  {
8249 	  case 'e':
8250 	    summarize_insn (XEXP (x, i), sum, 0);
8251 	    break;
8252 
8253 	  case 'E':
8254 	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8255 	      summarize_insn (XVECEXP (x, i, j), sum, 0);
8256 	    break;
8257 
8258 	  case 'i':
8259 	    break;
8260 
8261 	  default:
8262 	    abort ();
8263 	  }
8264     }
8265 }
8266 
8267 /* Ensure a sufficient number of `trapb' insns are in the code when
8268    the user requests code with a trap precision of functions or
8269    instructions.
8270 
8271    In naive mode, when the user requests a trap-precision of
8272    "instruction", a trapb is needed after every instruction that may
8273    generate a trap.  This ensures that the code is resumption safe but
8274    it is also slow.
8275 
8276    When optimizations are turned on, we delay issuing a trapb as long
8277    as possible.  In this context, a trap shadow is the sequence of
8278    instructions that starts with a (potentially) trap generating
8279    instruction and extends to the next trapb or call_pal instruction
8280    (but GCC never generates call_pal by itself).  We can delay (and
8281    therefore sometimes omit) a trapb subject to the following
8282    conditions:
8283 
8284    (a) On entry to the trap shadow, if any Alpha register or memory
8285    location contains a value that is used as an operand value by some
8286    instruction in the trap shadow (live on entry), then no instruction
8287    in the trap shadow may modify the register or memory location.
8288 
8289    (b) Within the trap shadow, the computation of the base register
8290    for a memory load or store instruction may not involve using the
8291    result of an instruction that might generate an UNPREDICTABLE
8292    result.
8293 
8294    (c) Within the trap shadow, no register may be used more than once
8295    as a destination register.  (This is to make life easier for the
8296    trap-handler.)
8297 
8298    (d) The trap shadow may not include any branch instructions.  */
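
/* A sketch of condition (c), purely illustrative:

	addt $f1,$f2,$f0	# may trap; shadow opens with $f0 defined
	mult $f0,$f3,$f0	# defines $f0 a second time -- violates (c)

   so alpha_handle_trap_shadows must close the shadow with a trapb
   between the two instructions.  */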
8299 
8300 static void
8301 alpha_handle_trap_shadows (insns)
8302      rtx insns;
8303 {
8304   struct shadow_summary shadow;
8305   int trap_pending, exception_nesting;
8306   rtx i, n;
8307 
8308   trap_pending = 0;
8309   exception_nesting = 0;
8310   shadow.used.i = 0;
8311   shadow.used.fp = 0;
8312   shadow.used.mem = 0;
8313   shadow.defd = shadow.used;
8314 
8315   for (i = insns; i ; i = NEXT_INSN (i))
8316     {
8317       if (GET_CODE (i) == NOTE)
8318 	{
8319 	  switch (NOTE_LINE_NUMBER (i))
8320 	    {
8321 	    case NOTE_INSN_EH_REGION_BEG:
8322 	      exception_nesting++;
8323 	      if (trap_pending)
8324 		goto close_shadow;
8325 	      break;
8326 
8327 	    case NOTE_INSN_EH_REGION_END:
8328 	      exception_nesting--;
8329 	      if (trap_pending)
8330 		goto close_shadow;
8331 	      break;
8332 
8333 	    case NOTE_INSN_EPILOGUE_BEG:
8334 	      if (trap_pending && alpha_tp >= ALPHA_TP_FUNC)
8335 		goto close_shadow;
8336 	      break;
8337 	    }
8338 	}
8339       else if (trap_pending)
8340 	{
8341 	  if (alpha_tp == ALPHA_TP_FUNC)
8342 	    {
8343 	      if (GET_CODE (i) == JUMP_INSN
8344 		  && GET_CODE (PATTERN (i)) == RETURN)
8345 		goto close_shadow;
8346 	    }
8347 	  else if (alpha_tp == ALPHA_TP_INSN)
8348 	    {
8349 	      if (optimize > 0)
8350 		{
8351 		  struct shadow_summary sum;
8352 
8353 		  sum.used.i = 0;
8354 		  sum.used.fp = 0;
8355 		  sum.used.mem = 0;
8356 		  sum.defd = sum.used;
8357 
8358 		  switch (GET_CODE (i))
8359 		    {
8360 		    case INSN:
8361 		      /* Annoyingly, get_attr_trap will abort on these.  */
8362 		      if (GET_CODE (PATTERN (i)) == USE
8363 			  || GET_CODE (PATTERN (i)) == CLOBBER)
8364 			break;
8365 
8366 		      summarize_insn (PATTERN (i), &sum, 0);
8367 
8368 		      if ((sum.defd.i & shadow.defd.i)
8369 			  || (sum.defd.fp & shadow.defd.fp))
8370 			{
8371 			  /* (c) would be violated */
8372 			  goto close_shadow;
8373 			}
8374 
8375 		      /* Combine shadow with summary of current insn: */
8376 		      shadow.used.i   |= sum.used.i;
8377 		      shadow.used.fp  |= sum.used.fp;
8378 		      shadow.used.mem |= sum.used.mem;
8379 		      shadow.defd.i   |= sum.defd.i;
8380 		      shadow.defd.fp  |= sum.defd.fp;
8381 		      shadow.defd.mem |= sum.defd.mem;
8382 
8383 		      if ((sum.defd.i & shadow.used.i)
8384 			  || (sum.defd.fp & shadow.used.fp)
8385 			  || (sum.defd.mem & shadow.used.mem))
8386 			{
8387 			  /* (a) would be violated (also takes care of (b))  */
8388 			  if (get_attr_trap (i) == TRAP_YES
8389 			      && ((sum.defd.i & sum.used.i)
8390 				  || (sum.defd.fp & sum.used.fp)))
8391 			    abort ();
8392 
8393 			  goto close_shadow;
8394 			}
8395 		      break;
8396 
8397 		    case JUMP_INSN:
8398 		    case CALL_INSN:
8399 		    case CODE_LABEL:
8400 		      goto close_shadow;
8401 
8402 		    default:
8403 		      abort ();
8404 		    }
8405 		}
8406 	      else
8407 		{
8408 		close_shadow:
8409 		  n = emit_insn_before (gen_trapb (), i);
8410 		  PUT_MODE (n, TImode);
8411 		  PUT_MODE (i, TImode);
8412 		  trap_pending = 0;
8413 		  shadow.used.i = 0;
8414 		  shadow.used.fp = 0;
8415 		  shadow.used.mem = 0;
8416 		  shadow.defd = shadow.used;
8417 		}
8418 	    }
8419 	}
8420 
8421       if ((exception_nesting > 0 || alpha_tp >= ALPHA_TP_FUNC)
8422 	  && GET_CODE (i) == INSN
8423 	  && GET_CODE (PATTERN (i)) != USE
8424 	  && GET_CODE (PATTERN (i)) != CLOBBER
8425 	  && get_attr_trap (i) == TRAP_YES)
8426 	{
8427 	  if (optimize && !trap_pending)
8428 	    summarize_insn (PATTERN (i), &shadow, 0);
8429 	  trap_pending = 1;
8430 	}
8431     }
8432 }
8433 
8434 /* Alpha can only issue instruction groups simultaneously if they are
8435    suitably aligned.  This is very processor-specific.  */
8436 
8437 enum alphaev4_pipe {
8438   EV4_STOP = 0,
8439   EV4_IB0 = 1,
8440   EV4_IB1 = 2,
8441   EV4_IBX = 4
8442 };
8443 
8444 enum alphaev5_pipe {
8445   EV5_STOP = 0,
8446   EV5_NONE = 1,
8447   EV5_E01 = 2,
8448   EV5_E0 = 4,
8449   EV5_E1 = 8,
8450   EV5_FAM = 16,
8451   EV5_FA = 32,
8452   EV5_FM = 64
8453 };
8454 
8455 static enum alphaev4_pipe alphaev4_insn_pipe PARAMS ((rtx));
8456 static enum alphaev5_pipe alphaev5_insn_pipe PARAMS ((rtx));
8457 static rtx alphaev4_next_group PARAMS ((rtx, int *, int *));
8458 static rtx alphaev5_next_group PARAMS ((rtx, int *, int *));
8459 static rtx alphaev4_next_nop PARAMS ((int *));
8460 static rtx alphaev5_next_nop PARAMS ((int *));
8461 
8462 static void alpha_align_insns
8463   PARAMS ((rtx, unsigned int, rtx (*)(rtx, int *, int *), rtx (*)(int *)));
8464 
8465 static enum alphaev4_pipe
8466 alphaev4_insn_pipe (insn)
8467      rtx insn;
8468 {
8469   if (recog_memoized (insn) < 0)
8470     return EV4_STOP;
8471   if (get_attr_length (insn) != 4)
8472     return EV4_STOP;
8473 
8474   switch (get_attr_type (insn))
8475     {
8476     case TYPE_ILD:
8477     case TYPE_FLD:
8478       return EV4_IBX;
8479 
8480     case TYPE_LDSYM:
8481     case TYPE_IADD:
8482     case TYPE_ILOG:
8483     case TYPE_ICMOV:
8484     case TYPE_ICMP:
8485     case TYPE_IST:
8486     case TYPE_FST:
8487     case TYPE_SHIFT:
8488     case TYPE_IMUL:
8489     case TYPE_FBR:
8490       return EV4_IB0;
8491 
8492     case TYPE_MISC:
8493     case TYPE_IBR:
8494     case TYPE_JSR:
8495     case TYPE_CALLPAL:
8496     case TYPE_FCPYS:
8497     case TYPE_FCMOV:
8498     case TYPE_FADD:
8499     case TYPE_FDIV:
8500     case TYPE_FMUL:
8501       return EV4_IB1;
8502 
8503     default:
8504       abort ();
8505     }
8506 }
8507 
8508 static enum alphaev5_pipe
8509 alphaev5_insn_pipe (insn)
8510      rtx insn;
8511 {
8512   if (recog_memoized (insn) < 0)
8513     return EV5_STOP;
8514   if (get_attr_length (insn) != 4)
8515     return EV5_STOP;
8516 
8517   switch (get_attr_type (insn))
8518     {
8519     case TYPE_ILD:
8520     case TYPE_FLD:
8521     case TYPE_LDSYM:
8522     case TYPE_IADD:
8523     case TYPE_ILOG:
8524     case TYPE_ICMOV:
8525     case TYPE_ICMP:
8526       return EV5_E01;
8527 
8528     case TYPE_IST:
8529     case TYPE_FST:
8530     case TYPE_SHIFT:
8531     case TYPE_IMUL:
8532     case TYPE_MISC:
8533     case TYPE_MVI:
8534       return EV5_E0;
8535 
8536     case TYPE_IBR:
8537     case TYPE_JSR:
8538     case TYPE_CALLPAL:
8539       return EV5_E1;
8540 
8541     case TYPE_FCPYS:
8542       return EV5_FAM;
8543 
8544     case TYPE_FBR:
8545     case TYPE_FCMOV:
8546     case TYPE_FADD:
8547     case TYPE_FDIV:
8548       return EV5_FA;
8549 
8550     case TYPE_FMUL:
8551       return EV5_FM;
8552 
8553     default:
8554       abort();
8555     }
8556 }
8557 
8558 /* IN_USE is a mask of the slots currently filled within the insn group.
8559    The mask bits come from alphaev4_pipe above.  If EV4_IBX is set, then
8560    the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1.
8561 
8562    LEN is, of course, the length of the group in bytes.  */
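
/* E.g. (illustrative): an ILD (EV4_IBX) followed by an IADD (EV4_IB0).
   The load claims IB0 but records IBX so it may still be swapped; the
   add then bumps the load to IB1 and takes IB0 itself, leaving
   in_use == EV4_IB0 | EV4_IB1 | EV4_IBX and len == 8.  */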
8563 
8564 static rtx
8565 alphaev4_next_group (insn, pin_use, plen)
8566      rtx insn;
8567      int *pin_use, *plen;
8568 {
8569   int len, in_use;
8570 
8571   len = in_use = 0;
8572 
8573   if (! INSN_P (insn)
8574       || GET_CODE (PATTERN (insn)) == CLOBBER
8575       || GET_CODE (PATTERN (insn)) == USE)
8576     goto next_and_done;
8577 
8578   while (1)
8579     {
8580       enum alphaev4_pipe pipe;
8581 
8582       pipe = alphaev4_insn_pipe (insn);
8583       switch (pipe)
8584 	{
8585 	case EV4_STOP:
8586 	  /* Force complex instructions to start new groups.  */
8587 	  if (in_use)
8588 	    goto done;
8589 
8590 	  /* If this is a completely unrecognized insn, it's an asm.
8591 	     We don't know how long it is, so record length as -1 to
8592 	     signal a needed realignment.  */
8593 	  if (recog_memoized (insn) < 0)
8594 	    len = -1;
8595 	  else
8596 	    len = get_attr_length (insn);
8597 	  goto next_and_done;
8598 
8599 	case EV4_IBX:
8600 	  if (in_use & EV4_IB0)
8601 	    {
8602 	      if (in_use & EV4_IB1)
8603 		goto done;
8604 	      in_use |= EV4_IB1;
8605 	    }
8606 	  else
8607 	    in_use |= EV4_IB0 | EV4_IBX;
8608 	  break;
8609 
8610 	case EV4_IB0:
8611 	  if (in_use & EV4_IB0)
8612 	    {
8613 	      if (!(in_use & EV4_IBX) || (in_use & EV4_IB1))
8614 		goto done;
8615 	      in_use |= EV4_IB1;
8616 	    }
8617 	  in_use |= EV4_IB0;
8618 	  break;
8619 
8620 	case EV4_IB1:
8621 	  if (in_use & EV4_IB1)
8622 	    goto done;
8623 	  in_use |= EV4_IB1;
8624 	  break;
8625 
8626 	default:
8627 	  abort();
8628 	}
8629       len += 4;
8630 
8631       /* Haifa doesn't do well at scheduling branches.  */
8632       if (GET_CODE (insn) == JUMP_INSN)
8633 	goto next_and_done;
8634 
8635     next:
8636       insn = next_nonnote_insn (insn);
8637 
8638       if (!insn || ! INSN_P (insn))
8639 	goto done;
8640 
8641       /* Let Haifa tell us where it thinks insn group boundaries are.  */
8642       if (GET_MODE (insn) == TImode)
8643 	goto done;
8644 
8645       if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
8646 	goto next;
8647     }
8648 
8649  next_and_done:
8650   insn = next_nonnote_insn (insn);
8651 
8652  done:
8653   *plen = len;
8654   *pin_use = in_use;
8655   return insn;
8656 }
8657 
8658 /* IN_USE is a mask of the slots currently filled within the insn group.
8659    The mask bits come from alphaev5_pipe above.  If EV5_E01 is set, then
8660    the insn in EV5_E0 can be swapped by the hardware into EV5_E1.
8661 
8662    LEN is, of course, the length of the group in bytes.  */
8663 
8664 static rtx
8665 alphaev5_next_group (insn, pin_use, plen)
8666      rtx insn;
8667      int *pin_use, *plen;
8668 {
8669   int len, in_use;
8670 
8671   len = in_use = 0;
8672 
8673   if (! INSN_P (insn)
8674       || GET_CODE (PATTERN (insn)) == CLOBBER
8675       || GET_CODE (PATTERN (insn)) == USE)
8676     goto next_and_done;
8677 
8678   while (1)
8679     {
8680       enum alphaev5_pipe pipe;
8681 
8682       pipe = alphaev5_insn_pipe (insn);
8683       switch (pipe)
8684 	{
8685 	case EV5_STOP:
8686 	  /* Force complex instructions to start new groups.  */
8687 	  if (in_use)
8688 	    goto done;
8689 
8690 	  /* If this is a completely unrecognized insn, it's an asm.
8691 	     We don't know how long it is, so record length as -1 to
8692 	     signal a needed realignment.  */
8693 	  if (recog_memoized (insn) < 0)
8694 	    len = -1;
8695 	  else
8696 	    len = get_attr_length (insn);
8697 	  goto next_and_done;
8698 
8699 	/* ??? Most of the places below, we would like to abort, as
8700 	   it would indicate an error either in Haifa, or in the
8701 	   scheduling description.  Unfortunately, Haifa never
8702 	   schedules the last instruction of the BB, so we don't
8703 	   have an accurate TI bit to go by.  */
8704 	case EV5_E01:
8705 	  if (in_use & EV5_E0)
8706 	    {
8707 	      if (in_use & EV5_E1)
8708 		goto done;
8709 	      in_use |= EV5_E1;
8710 	    }
8711 	  else
8712 	    in_use |= EV5_E0 | EV5_E01;
8713 	  break;
8714 
8715 	case EV5_E0:
8716 	  if (in_use & EV5_E0)
8717 	    {
8718 	      if (!(in_use & EV5_E01) || (in_use & EV5_E1))
8719 		goto done;
8720 	      in_use |= EV5_E1;
8721 	    }
8722 	  in_use |= EV5_E0;
8723 	  break;
8724 
8725 	case EV5_E1:
8726 	  if (in_use & EV5_E1)
8727 	    goto done;
8728 	  in_use |= EV5_E1;
8729 	  break;
8730 
8731 	case EV5_FAM:
8732 	  if (in_use & EV5_FA)
8733 	    {
8734 	      if (in_use & EV5_FM)
8735 		goto done;
8736 	      in_use |= EV5_FM;
8737 	    }
8738 	  else
8739 	    in_use |= EV5_FA | EV5_FAM;
8740 	  break;
8741 
8742 	case EV5_FA:
8743 	  if (in_use & EV5_FA)
8744 	    goto done;
8745 	  in_use |= EV5_FA;
8746 	  break;
8747 
8748 	case EV5_FM:
8749 	  if (in_use & EV5_FM)
8750 	    goto done;
8751 	  in_use |= EV5_FM;
8752 	  break;
8753 
8754 	case EV5_NONE:
8755 	  break;
8756 
8757 	default:
8758 	  abort();
8759 	}
8760       len += 4;
8761 
8762       /* Haifa doesn't do well at scheduling branches.  */
8763       /* ??? If this is predicted not-taken, slotting continues, except
8764 	 that no more IBR, FBR, or JSR insns may be slotted.  */
8765       if (GET_CODE (insn) == JUMP_INSN)
8766 	goto next_and_done;
8767 
8768     next:
8769       insn = next_nonnote_insn (insn);
8770 
8771       if (!insn || ! INSN_P (insn))
8772 	goto done;
8773 
8774       /* Let Haifa tell us where it thinks insn group boundaries are.  */
8775       if (GET_MODE (insn) == TImode)
8776 	goto done;
8777 
8778       if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
8779 	goto next;
8780     }
8781 
8782  next_and_done:
8783   insn = next_nonnote_insn (insn);
8784 
8785  done:
8786   *plen = len;
8787   *pin_use = in_use;
8788   return insn;
8789 }
8790 
8791 static rtx
8792 alphaev4_next_nop (pin_use)
8793      int *pin_use;
8794 {
8795   int in_use = *pin_use;
8796   rtx nop;
8797 
8798   if (!(in_use & EV4_IB0))
8799     {
8800       in_use |= EV4_IB0;
8801       nop = gen_nop ();
8802     }
8803   else if ((in_use & (EV4_IBX|EV4_IB1)) == EV4_IBX)
8804     {
8805       in_use |= EV4_IB1;
8806       nop = gen_nop ();
8807     }
8808   else if (TARGET_FP && !(in_use & EV4_IB1))
8809     {
8810       in_use |= EV4_IB1;
8811       nop = gen_fnop ();
8812     }
8813   else
8814     nop = gen_unop ();
8815 
8816   *pin_use = in_use;
8817   return nop;
8818 }
8819 
8820 static rtx
8821 alphaev5_next_nop (pin_use)
8822      int *pin_use;
8823 {
8824   int in_use = *pin_use;
8825   rtx nop;
8826 
8827   if (!(in_use & EV5_E1))
8828     {
8829       in_use |= EV5_E1;
8830       nop = gen_nop ();
8831     }
8832   else if (TARGET_FP && !(in_use & EV5_FA))
8833     {
8834       in_use |= EV5_FA;
8835       nop = gen_fnop ();
8836     }
8837   else if (TARGET_FP && !(in_use & EV5_FM))
8838     {
8839       in_use |= EV5_FM;
8840       nop = gen_fnop ();
8841     }
8842   else
8843     nop = gen_unop ();
8844 
8845   *pin_use = in_use;
8846   return nop;
8847 }
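
/* Note on the two nop selectors above (editor's summary): each prefers a
   plain integer nop when an integer slot is free, falls back to fnop when
   only a floating-point slot can still be filled (and FP insns are
   enabled), and otherwise emits unop, which issues in an integer pipe
   without architectural effect.  */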
8848 
8849 /* The instruction group alignment main loop.  */
8850 
8851 static void
8852 alpha_align_insns (insns, max_align, next_group, next_nop)
8853      rtx insns;
8854      unsigned int max_align;
8855      rtx (*next_group) PARAMS ((rtx, int *, int *));
8856      rtx (*next_nop) PARAMS ((int *));
8857 {
8858   /* ALIGN is the known alignment for the insn group.  */
8859   unsigned int align;
8860   /* OFS is the offset of the current insn in the insn group.  */
8861   int ofs;
8862   int prev_in_use, in_use, len;
8863   rtx i, next;
8864 
8865   /* Let shorten branches care for assigning alignments to code labels.  */
8866   shorten_branches (insns);
8867 
8868   if (align_functions < 4)
8869     align = 4;
8870   else if ((unsigned int) align_functions < max_align)
8871     align = align_functions;
8872   else
8873     align = max_align;
8874 
8875   ofs = prev_in_use = 0;
8876   i = insns;
8877   if (GET_CODE (i) == NOTE)
8878     i = next_nonnote_insn (i);
8879 
8880   while (i)
8881     {
8882       next = (*next_group) (i, &in_use, &len);
8883 
8884       /* When we see a label, resync alignment etc.  */
8885       if (GET_CODE (i) == CODE_LABEL)
8886 	{
8887 	  unsigned int new_align = 1 << label_to_alignment (i);
8888 
8889 	  if (new_align >= align)
8890 	    {
8891 	      align = new_align < max_align ? new_align : max_align;
8892 	      ofs = 0;
8893 	    }
8894 
8895 	  else if (ofs & (new_align-1))
8896 	    ofs = (ofs | (new_align-1)) + 1;
8897 	  if (len != 0)
8898 	    abort();
8899 	}
8900 
8901       /* Handle complex instructions specially.  */
8902       else if (in_use == 0)
8903 	{
8904 	  /* Asms will have length < 0.  This is a signal that we have
8905 	     lost alignment knowledge.  Assume, however, that the asm
8906 	     will not mis-align instructions.  */
8907 	  if (len < 0)
8908 	    {
8909 	      ofs = 0;
8910 	      align = 4;
8911 	      len = 0;
8912 	    }
8913 	}
8914 
8915       /* If the known alignment is smaller than the recognized insn group,
8916 	 realign the output.  */
8917       else if ((int) align < len)
8918 	{
8919 	  unsigned int new_log_align = len > 8 ? 4 : 3;
8920 	  rtx prev, where;
8921 
8922 	  where = prev = prev_nonnote_insn (i);
8923 	  if (!where || GET_CODE (where) != CODE_LABEL)
8924 	    where = i;
8925 
8926 	  /* Can't realign between a call and its gp reload.  */
8927 	  if (! (TARGET_EXPLICIT_RELOCS
8928 		 && prev && GET_CODE (prev) == CALL_INSN))
8929 	    {
8930 	      emit_insn_before (gen_realign (GEN_INT (new_log_align)), where);
8931 	      align = 1 << new_log_align;
8932 	      ofs = 0;
8933 	    }
8934 	}
8935 
8936       /* If the group won't fit in the same INT16 as the previous,
8937 	 we need to add padding to keep the group together.  Rather
8938 	 than simply leaving the insn filling to the assembler, we
8939 	 can make use of the knowledge of what sorts of instructions
8940 	 were issued in the previous group to make sure that all of
8941 	 the added nops are really free.  */
8942       else if (ofs + len > (int) align)
8943 	{
8944 	  int nop_count = (align - ofs) / 4;
8945 	  rtx where;
8946 
8947 	  /* Insert nops before labels, branches, and calls to truly merge
8948 	     the execution of the nops with the previous instruction group.  */
8949 	  where = prev_nonnote_insn (i);
8950 	  if (where)
8951 	    {
8952 	      if (GET_CODE (where) == CODE_LABEL)
8953 		{
8954 		  rtx where2 = prev_nonnote_insn (where);
8955 		  if (where2 && GET_CODE (where2) == JUMP_INSN)
8956 		    where = where2;
8957 		}
8958 	      else if (GET_CODE (where) == INSN)
8959 		where = i;
8960 	    }
8961 	  else
8962 	    where = i;
8963 
8964 	  do
8965 	    emit_insn_before ((*next_nop)(&prev_in_use), where);
8966 	  while (--nop_count);
8967 	  ofs = 0;
8968 	}
8969 
8970       ofs = (ofs + len) & (align - 1);
8971       prev_in_use = in_use;
8972       i = next;
8973     }
8974 }
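
/* Worked example with hypothetical numbers: assume ALIGN == 16 and
   OFS == 12.  A 4-byte group gives ofs + len == 16, which does not exceed
   ALIGN, so no padding is emitted and the new OFS is (12 + 4) & 15 == 0.
   An 8-byte group gives ofs + len == 20 > 16, so (16 - 12) / 4 == 1 nop
   is emitted from the previous group's free slots and the group starts
   the next 16-byte fetch block.  */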
8975 
8976 /* Machine dependent reorg pass.  */
8977 
8978 void
8979 alpha_reorg (insns)
8980      rtx insns;
8981 {
8982   if (alpha_tp != ALPHA_TP_PROG || flag_exceptions)
8983     alpha_handle_trap_shadows (insns);
8984 
8985   /* Due to the number of extra trapb insns, don't bother fixing up
8986      alignment when trap precision is instruction.  Moreover, we can
8987      only do our job when sched2 is run.  */
8988   if (optimize && !optimize_size
8989       && alpha_tp != ALPHA_TP_INSN
8990       && flag_schedule_insns_after_reload)
8991     {
8992       if (alpha_cpu == PROCESSOR_EV4)
8993 	alpha_align_insns (insns, 8, alphaev4_next_group, alphaev4_next_nop);
8994       else if (alpha_cpu == PROCESSOR_EV5)
8995 	alpha_align_insns (insns, 16, alphaev5_next_group, alphaev5_next_nop);
8996     }
8997 }
8998 
8999 #ifdef OBJECT_FORMAT_ELF
9000 
9001 /* Switch to the section to which we should output X.  The only thing
9002    special we do here is to honor small data.  */
9003 
9004 static void
9005 alpha_elf_select_rtx_section (mode, x, align)
9006      enum machine_mode mode;
9007      rtx x;
9008      unsigned HOST_WIDE_INT align;
9009 {
9010   if (TARGET_SMALL_DATA && GET_MODE_SIZE (mode) <= g_switch_value)
9011     /* ??? Consider using mergeable sdata sections.  */
9012     sdata_section ();
9013   else
9014     default_elf_select_rtx_section (mode, x, align);
9015 }
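
/* For example (editor's sketch, assuming the default small-data threshold
   of 8 bytes): an 8-byte constant pool entry is placed in .sdata, where it
   can be reached with a short gp-relative offset, while larger entries
   fall through to the default ELF section selection.  */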
9016 
9017 #endif /* OBJECT_FORMAT_ELF */
9018 
9019 #if TARGET_ABI_OPEN_VMS
9020 
9021 /* Return the VMS argument type corresponding to MODE.  */
9022 
9023 enum avms_arg_type
9024 alpha_arg_type (mode)
9025      enum machine_mode mode;
9026 {
9027   switch (mode)
9028     {
9029     case SFmode:
9030       return TARGET_FLOAT_VAX ? FF : FS;
9031     case DFmode:
9032       return TARGET_FLOAT_VAX ? FD : FT;
9033     default:
9034       return I64;
9035     }
9036 }
9037 
9038 /* Return an rtx for an integer representing the VMS Argument Information
9039    register value.  */
9040 
9041 rtx
9042 alpha_arg_info_reg_val (cum)
9043      CUMULATIVE_ARGS cum;
9044 {
9045   unsigned HOST_WIDE_INT regval = cum.num_args;
9046   int i;
9047 
9048   for (i = 0; i < 6; i++)
9049     regval |= ((int) cum.atypes[i]) << (i * 3 + 8);
9050 
9051   return GEN_INT (regval);
9052 }
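
/* Worked example (hypothetical call): for a function taking (int, double)
   with VAX floats disabled, num_args == 2, atypes[0] == I64 and
   atypes[1] == FT, so the AI register value is 2 | (I64 << 8) | (FT << 11);
   each argument type code occupies 3 bits starting at bit 8.  */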
9053 
9054 /* Protect alpha_links from garbage collection.  */
9055 
9056 static int
9057 mark_alpha_links_node (node, data)
9058      splay_tree_node node;
9059      void *data ATTRIBUTE_UNUSED;
9060 {
9061   struct alpha_links *links = (struct alpha_links *) node->value;
9062   ggc_mark_rtx (links->linkage);
9063   return 0;
9064 }
9065 
9066 static void
9067 mark_alpha_links (ptr)
9068      void *ptr;
9069 {
9070   splay_tree tree = *(splay_tree *) ptr;
9071   splay_tree_foreach (tree, mark_alpha_links_node, NULL);
9072 }
9073 
9074 /* Make (or fake) a .linkage entry for a function call.
9075 
9076    IS_LOCAL is 0 if NAME is used in a call, 1 if NAME is used in a definition.
9077 
9078    Return a SYMBOL_REF rtx for the linkage.  */
9079 
9080 rtx
9081 alpha_need_linkage (name, is_local)
9082     const char *name;
9083     int is_local;
9084 {
9085   splay_tree_node node;
9086   struct alpha_links *al;
9087   struct alpha_funcs *cfaf;
9088 
9089   if (name[0] == '*')
9090     name++;
9091 
9092   if (is_local)
9093     {
9094       alpha_funcs_tree = splay_tree_new
9095 	((splay_tree_compare_fn) splay_tree_compare_pointers,
9096 	 (splay_tree_delete_key_fn) free,
9097 	 (splay_tree_delete_value_fn) free);
9098 
9099       cfaf = (struct alpha_funcs *) xmalloc (sizeof (struct alpha_funcs));
9100 
9101       cfaf->links = 0;
9102       cfaf->num = ++alpha_funcs_num;
9103 
9104       splay_tree_insert (alpha_funcs_tree,
9105 			 (splay_tree_key) current_function_decl,
9106 			 (splay_tree_value) cfaf);
9107 
9108     }
9109 
9110   if (alpha_links_tree)
9111     {
9112       /* Is this name already defined?  */
9113 
9114       node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
9115       if (node)
9116 	{
9117 	  al = (struct alpha_links *) node->value;
9118 	  if (is_local)
9119 	    {
9120 	      /* Defined here, but previously assumed external.  */
9121 	      if (al->lkind == KIND_EXTERN)
9122 		al->lkind = KIND_LOCAL;
9123 	    }
9124 	  else
9125 	    {
9126 	      /* Used here, but previously assumed unused.  */
9127 	      if (al->lkind == KIND_UNUSED)
9128 		al->lkind = KIND_LOCAL;
9129 	    }
9130 	  return al->linkage;
9131 	}
9132     }
9133   else
9134     {
9135       alpha_links_tree = splay_tree_new
9136 	((splay_tree_compare_fn) strcmp,
9137 	 (splay_tree_delete_key_fn) free,
9138 	 (splay_tree_delete_value_fn) free);
9139 
9140       ggc_add_root (&alpha_links_tree, 1, 1, mark_alpha_links);
9141     }
9142 
9143   al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
9144   name = xstrdup (name);
9145 
9146   /* Assume external if no definition.  */
9147   al->lkind = (is_local ? KIND_UNUSED : KIND_EXTERN);
9148 
9149   /* Ensure we have an IDENTIFIER so assemble_name can mark it used.  */
9150   get_identifier (name);
9151 
9152   /* Construct a SYMBOL_REF for us to call.  */
9153   {
9154     size_t name_len = strlen (name);
9155     char *linksym = alloca (name_len + 6);
9156     linksym[0] = '$';
9157     memcpy (linksym + 1, name, name_len);
9158     memcpy (linksym + 1 + name_len, "..lk", 5);
9159     al->linkage = gen_rtx_SYMBOL_REF (Pmode,
9160 				      ggc_alloc_string (linksym, name_len + 5));
9161   }
9162 
9163   splay_tree_insert (alpha_links_tree, (splay_tree_key) name,
9164 		     (splay_tree_value) al);
9165 
9166   return al->linkage;
9167 }
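
/* For example, the first reference to a function "foo" creates the linkage
   symbol "$foo..lk"; the quadword pair emitted for it by
   alpha_write_one_linkage below supplies the entry point and procedure
   descriptor that a VMS call sequence needs.  */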
9168 
9169 rtx
9170 alpha_use_linkage (linkage, cfundecl, lflag, rflag)
9171      rtx linkage;
9172      tree cfundecl;
9173      int lflag;
9174      int rflag;
9175 {
9176   splay_tree_node cfunnode;
9177   struct alpha_funcs *cfaf;
9178   struct alpha_links *al;
9179   const char *name = XSTR (linkage, 0);
9180 
9181   cfaf = (struct alpha_funcs *) 0;
9182   al = (struct alpha_links *) 0;
9183 
9184   cfunnode = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) cfundecl);
9185   cfaf = (struct alpha_funcs *) cfunnode->value;
9186 
9187   if (cfaf->links)
9188     {
9189       splay_tree_node lnode;
9190 
9191       /* Is this name already defined?  */
9192 
9193       lnode = splay_tree_lookup (cfaf->links, (splay_tree_key) name);
9194       if (lnode)
9195 	al = (struct alpha_links *) lnode->value;
9196     }
9197   else
9198     {
9199       cfaf->links = splay_tree_new
9200 	((splay_tree_compare_fn) strcmp,
9201 	 (splay_tree_delete_key_fn) free,
9202 	 (splay_tree_delete_value_fn) free);
9203       ggc_add_root (&cfaf->links, 1, 1, mark_alpha_links);
9204     }
9205 
9206   if (!al)
9207     {
9208       size_t name_len;
9209       size_t buflen;
9210       char buf [512];
9211       char *linksym;
9212       splay_tree_node node = 0;
9213       struct alpha_links *anl;
9214 
9215       if (name[0] == '*')
9216 	name++;
9217 
9218       name_len = strlen (name);
9219 
9220       al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
9221       al->num = cfaf->num;
9222 
9223       node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
9224       if (node)
9225 	{
9226 	  anl = (struct alpha_links *) node->value;
9227 	  al->lkind = anl->lkind;
9228 	}
9229 
9230       sprintf (buf, "$%d..%s..lk", cfaf->num, name);
9231       buflen = strlen (buf);
9232       linksym = alloca (buflen + 1);
9233       memcpy (linksym, buf, buflen + 1);
9234 
9235       al->linkage = gen_rtx_SYMBOL_REF
9236 	(Pmode, ggc_alloc_string (linksym, buflen + 1));
9237 
9238       splay_tree_insert (cfaf->links, (splay_tree_key) name,
9239 			 (splay_tree_value) al);
9240     }
9241 
9242   if (rflag)
9243     al->rkind = KIND_CODEADDR;
9244   else
9245     al->rkind = KIND_LINKAGE;
9246 
9247   if (lflag)
9248     return gen_rtx_MEM (Pmode, plus_constant (al->linkage, 8));
9249   else
9250     return al->linkage;
9251 }
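
/* For example (hypothetical): if the third function seen in this module
   (cfaf->num == 3) references "foo", it gets the private linkage symbol
   "$3..foo..lk" in its own links tree, so each function's linkage entries
   can be written out separately by alpha_write_linkage below.  */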
9252 
9253 static int
9254 alpha_write_one_linkage (node, data)
9255      splay_tree_node node;
9256      void *data;
9257 {
9258   const char *const name = (const char *) node->key;
9259   struct alpha_links *link = (struct alpha_links *) node->value;
9260   FILE *stream = (FILE *) data;
9261 
9262   fprintf (stream, "$%d..%s..lk:\n", link->num, name);
9263   if (link->rkind == KIND_CODEADDR)
9264     {
9265       if (link->lkind == KIND_LOCAL)
9266 	{
9267 	  /* Local and used */
9268 	  fprintf (stream, "\t.quad %s..en\n", name);
9269 	}
9270       else
9271 	{
9272 	  /* External and used, request code address.  */
9273 	  fprintf (stream, "\t.code_address %s\n", name);
9274 	}
9275     }
9276   else
9277     {
9278       if (link->lkind == KIND_LOCAL)
9279 	{
9280 	  /* Local and used, build linkage pair.  */
9281 	  fprintf (stream, "\t.quad %s..en\n", name);
9282 	  fprintf (stream, "\t.quad %s\n", name);
9283 	}
9284       else
9285 	{
9286 	  /* External and used, request linkage pair.  */
9287 	  fprintf (stream, "\t.linkage %s\n", name);
9288 	}
9289     }
9290 
9291   return 0;
9292 }
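
/* Sketch of the output above for a local function "foo" used by function
   number 3, with a full linkage pair requested:

	$3..foo..lk:
		.quad foo..en
		.quad foo
*/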
9293 
9294 static void
9295 alpha_write_linkage (stream, funname, fundecl)
9296      FILE *stream;
9297      const char *funname;
9298      tree fundecl;
9299 {
9300   splay_tree_node node;
9301   struct alpha_funcs *func;
9302 
9303   link_section ();
9304   fprintf (stream, "\t.align 3\n");
9305   node = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) fundecl);
9306   func = (struct alpha_funcs *) node->value;
9307 
9308   fputs ("\t.name ", stream);
9309   assemble_name (stream, funname);
9310   fputs ("..na\n", stream);
9311   ASM_OUTPUT_LABEL (stream, funname);
9312   fprintf (stream, "\t.pdesc ");
9313   assemble_name (stream, funname);
9314   fprintf (stream, "..en,%s\n",
9315 	   alpha_procedure_type == PT_STACK ? "stack"
9316 	   : alpha_procedure_type == PT_REGISTER ? "reg" : "null");
9317 
9318   if (func->links)
9319     {
9320       splay_tree_foreach (func->links, alpha_write_one_linkage, stream);
9321       /* splay_tree_delete (func->links); */
9322     }
9323 }
9324 
9325 /* Given a decl, a section name, and whether the decl initializer
9326    has relocs, choose attributes for the section.  */
9327 
9328 #define SECTION_VMS_OVERLAY	SECTION_FORGET
9329 #define SECTION_VMS_GLOBAL SECTION_MACH_DEP
9330 #define SECTION_VMS_INITIALIZE (SECTION_VMS_GLOBAL << 1)
9331 
9332 static unsigned int
9333 vms_section_type_flags (decl, name, reloc)
9334      tree decl;
9335      const char *name;
9336      int reloc;
9337 {
9338   unsigned int flags = default_section_type_flags (decl, name, reloc);
9339 
9340   if (decl && DECL_ATTRIBUTES (decl)
9341       && lookup_attribute ("overlaid", DECL_ATTRIBUTES (decl)))
9342     flags |= SECTION_VMS_OVERLAY;
9343   if (decl && DECL_ATTRIBUTES (decl)
9344       && lookup_attribute ("global", DECL_ATTRIBUTES (decl)))
9345     flags |= SECTION_VMS_GLOBAL;
9346   if (decl && DECL_ATTRIBUTES (decl)
9347       && lookup_attribute ("initialize", DECL_ATTRIBUTES (decl)))
9348     flags |= SECTION_VMS_INITIALIZE;
9349 
9350   return flags;
9351 }
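
/* For example, a hypothetical declaration

	int bar __attribute__ ((global, initialize));

   would get SECTION_VMS_GLOBAL | SECTION_VMS_INITIALIZE added to its
   section flags, which vms_asm_named_section below renders as the ",GBL"
   and ",NOMOD" psect qualifiers.  */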
9352 
9353 /* Switch to an arbitrary section NAME with attributes as specified
9354    by FLAGS.  ALIGN specifies any known alignment requirements for
9355    the section; 0 if the default should be used.  */
9356 
9357 static void
9358 vms_asm_named_section (name, flags)
9359      const char *name;
9360      unsigned int flags;
9361 {
9362   fputc ('\n', asm_out_file);
9363   fprintf (asm_out_file, ".section\t%s", name);
9364 
9365   if (flags & SECTION_VMS_OVERLAY)
9366     fprintf (asm_out_file, ",OVR");
9367   if (flags & SECTION_VMS_GLOBAL)
9368     fprintf (asm_out_file, ",GBL");
9369   if (flags & SECTION_VMS_INITIALIZE)
9370     fprintf (asm_out_file, ",NOMOD");
9371   if (flags & SECTION_DEBUG)
9372     fprintf (asm_out_file, ",NOWRT");
9373 
9374   fputc ('\n', asm_out_file);
9375 }
9376 
9377 /* Record an element in the table of global constructors.  SYMBOL is
9378    a SYMBOL_REF of the function to be called; PRIORITY is a number
9379    between 0 and MAX_INIT_PRIORITY.
9380 
9381    Differs from default_ctors_section_asm_out_constructor in that the
9382    width of the .ctors entry is always 64 bits, rather than the 32 bits
9383    used by a normal pointer.  */
9384 
9385 static void
9386 vms_asm_out_constructor (symbol, priority)
9387      rtx symbol;
9388      int priority ATTRIBUTE_UNUSED;
9389 {
9390   ctors_section ();
9391   assemble_align (BITS_PER_WORD);
9392   assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
9393 }
9394 
9395 static void
9396 vms_asm_out_destructor (symbol, priority)
9397      rtx symbol;
9398      int priority ATTRIBUTE_UNUSED;
9399 {
9400   dtors_section ();
9401   assemble_align (BITS_PER_WORD);
9402   assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
9403 }
9404 #else
9405 
9406 rtx
9407 alpha_need_linkage (name, is_local)
9408      const char *name ATTRIBUTE_UNUSED;
9409      int is_local ATTRIBUTE_UNUSED;
9410 {
9411   return NULL_RTX;
9412 }
9413 
9414 rtx
9415 alpha_use_linkage (linkage, cfundecl, lflag, rflag)
9416      rtx linkage ATTRIBUTE_UNUSED;
9417      tree cfundecl ATTRIBUTE_UNUSED;
9418      int lflag ATTRIBUTE_UNUSED;
9419      int rflag ATTRIBUTE_UNUSED;
9420 {
9421   return NULL_RTX;
9422 }
9423 
9424 #endif /* TARGET_ABI_OPEN_VMS */
9425 
9426 #if TARGET_ABI_UNICOSMK
9427 
9428 static void unicosmk_output_module_name PARAMS ((FILE *));
9429 static void unicosmk_output_default_externs PARAMS ((FILE *));
9430 static void unicosmk_output_dex PARAMS ((FILE *));
9431 static void unicosmk_output_externs PARAMS ((FILE *));
9432 static void unicosmk_output_addr_vec PARAMS ((FILE *, rtx));
9433 static const char *unicosmk_ssib_name PARAMS ((void));
9434 static int unicosmk_special_name PARAMS ((const char *));
9435 
9436 /* Define the offset between two registers, one to be eliminated, and the
9437    other its replacement, at the start of a routine.  */
9438 
9439 int
9440 unicosmk_initial_elimination_offset (from, to)
9441       int from;
9442       int to;
9443 {
9444   int fixed_size;
9445 
9446   fixed_size = alpha_sa_size();
9447   if (fixed_size != 0)
9448     fixed_size += 48;
9449 
9450   if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
9451     return -fixed_size;
9452   else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
9453     return 0;
9454   else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
9455     return (ALPHA_ROUND (current_function_outgoing_args_size)
9456 	    + ALPHA_ROUND (get_frame_size()));
9457   else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
9458     return (ALPHA_ROUND (fixed_size)
9459 	    + ALPHA_ROUND (get_frame_size()
9460 			   + current_function_outgoing_args_size));
9461   else
9462     abort ();
9463 }
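
/* Worked example (hypothetical frame): with a 16-byte register save area,
   fixed_size == 16 + 48 == 64.  Given a 32-byte local frame and 16 bytes
   of outgoing arguments, ARG_POINTER -> STACK_POINTER eliminates to
   64 + ALPHA_ROUND (32 + 16) == 112, while FRAME_POINTER ->
   HARD_FRAME_POINTER eliminates to -64.  */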
9464 
9465 /* Output the module name for .ident and .end directives.  We have to strip
9466    directories and make sure that the module name starts with a letter
9467    or '$'.  */
9468 
9469 static void
9470 unicosmk_output_module_name (file)
9471       FILE *file;
9472 {
9473   const char *name;
9474 
9475   /* Strip directories.  */
9476 
9477   name = strrchr (main_input_filename, '/');
9478   if (name)
9479     ++name;
9480   else
9481     name = main_input_filename;
9482 
9483   /* CAM only accepts module names that start with a letter or '$'. We
9484      prefix the module name with a '$' if necessary.  */
9485 
9486   if (!ISALPHA (*name))
9487     putc ('$', file);
9488   output_clean_symbol_name (file, name);
9489 }
9490 
9491 /* Output text to appear at the beginning of an assembler file.  */
9492 
9493 void
9494 unicosmk_asm_file_start (file)
9495       FILE *file;
9496 {
9497   int i;
9498 
9499   fputs ("\t.ident\t", file);
9500   unicosmk_output_module_name (file);
9501   fputs ("\n\n", file);
9502 
9503   /* CAM, the Unicos/Mk assembler, uses different register names: rN for the
9504      integer register N and fN for the floating-point register N.  Instead of
9505      trying to use these names in alpha.md, we use micro definitions to make
9506      the symbols $N and $fN refer to the appropriate registers.  */
9510 
9511   for (i = 0; i < 32; ++i)
9512     fprintf (file, "$%d <- r%d\n", i, i);
9513 
9514   for (i = 0; i < 32; ++i)
9515     fprintf (file, "$f%d <- f%d\n", i, i);
9516 
9517   putc ('\n', file);
9518 
9519   /* The .align directive fills unused space with zeroes, which does not work
9520      in code sections. We define the macro 'gcc@code@align' which uses nops
9521      instead. Note that it assumes that code sections always have the
9522      biggest possible alignment since . refers to the current offset from
9523      the beginning of the section.  */
9524 
9525   fputs ("\t.macro gcc@code@align n\n", file);
9526   fputs ("gcc@n@bytes = 1 << n\n", file);
9527   fputs ("gcc@here = . % gcc@n@bytes\n", file);
9528   fputs ("\t.if ne, gcc@here, 0\n", file);
9529   fputs ("\t.repeat (gcc@n@bytes - gcc@here) / 4\n", file);
9530   fputs ("\tbis r31,r31,r31\n", file);
9531   fputs ("\t.endr\n", file);
9532   fputs ("\t.endif\n", file);
9533   fputs ("\t.endm gcc@code@align\n\n", file);
9534 
9535   /* Output extern declarations which should always be visible.  */
9536   unicosmk_output_default_externs (file);
9537 
9538   /* Open a dummy section. We always need to be inside a section for the
9539      section-switching code to work correctly.
9540      ??? This should be a module id or something like that. I still have to
9541      figure out what the rules for those are.  */
9542   fputs ("\n\t.psect\t$SG00000,data\n", file);
9543 }
9544 
9545 /* Output text to appear at the end of an assembler file. This includes all
9546    pending extern declarations and DEX expressions.  */
9547 
9548 void
9549 unicosmk_asm_file_end (file)
9550       FILE *file;
9551 {
9552   fputs ("\t.endp\n\n", file);
9553 
9554   /* Output all pending externs.  */
9555 
9556   unicosmk_output_externs (file);
9557 
9558   /* Output dex definitions used for functions whose names conflict with
9559      register names.  */
9560 
9561   unicosmk_output_dex (file);
9562 
9563   fputs ("\t.end\t", file);
9564   unicosmk_output_module_name (file);
9565   putc ('\n', file);
9566 }
9567 
9568 /* Output the definition of a common variable.  */
9569 
9570 void
9571 unicosmk_output_common (file, name, size, align)
9572       FILE *file;
9573       const char *name;
9574       int size;
9575       int align;
9576 {
9577   tree name_tree;
9578   printf ("T3E__: common %s\n", name);
9579 
9580   common_section ();
9581   fputs("\t.endp\n\n\t.psect ", file);
9582   assemble_name(file, name);
9583   fprintf(file, ",%d,common\n", floor_log2 (align / BITS_PER_UNIT));
9584   fprintf(file, "\t.byte\t0:%d\n", size);
9585 
9586   /* Mark the symbol as defined in this module.  */
9587   name_tree = get_identifier (name);
9588   TREE_ASM_WRITTEN (name_tree) = 1;
9589 }
9590 
9591 #define SECTION_PUBLIC SECTION_MACH_DEP
9592 #define SECTION_MAIN (SECTION_PUBLIC << 1)
9593 static int current_section_align;
9594 
9595 static unsigned int
9596 unicosmk_section_type_flags (decl, name, reloc)
9597      tree decl;
9598      const char *name;
9599      int reloc ATTRIBUTE_UNUSED;
9600 {
9601   unsigned int flags = default_section_type_flags (decl, name, reloc);
9602 
9603   if (!decl)
9604     return flags;
9605 
9606   if (TREE_CODE (decl) == FUNCTION_DECL)
9607     {
9608       current_section_align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
9609       if (align_functions_log > current_section_align)
9610 	current_section_align = align_functions_log;
9611 
9612       if (! strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "main"))
9613 	flags |= SECTION_MAIN;
9614     }
9615   else
9616     current_section_align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);
9617 
9618   if (TREE_PUBLIC (decl))
9619     flags |= SECTION_PUBLIC;
9620 
9621   return flags;
9622 }
9623 
9624 /* Generate a section name for decl and associate it with the
9625    declaration.  */
9626 
9627 static void
9628 unicosmk_unique_section (decl, reloc)
9629       tree decl;
9630       int reloc ATTRIBUTE_UNUSED;
9631 {
9632   const char *name;
9633   int len;
9634 
9635   if (!decl)
9636     abort ();
9637 
9638   name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
9639   name = alpha_strip_name_encoding (name);
9640   len = strlen (name);
9641 
9642   if (TREE_CODE (decl) == FUNCTION_DECL)
9643     {
9644       char *string;
9645 
9646       /* It is essential that we prefix the section name here because
9647 	 otherwise the section names generated for constructors and
9648 	 destructors confuse collect2.  */
9649 
9650       string = alloca (len + 6);
9651       sprintf (string, "code@%s", name);
9652       DECL_SECTION_NAME (decl) = build_string (len + 5, string);
9653     }
9654   else if (TREE_PUBLIC (decl))
9655     DECL_SECTION_NAME (decl) = build_string (len, name);
9656   else
9657     {
9658       char *string;
9659 
9660       string = alloca (len + 6);
9661       sprintf (string, "data@%s", name);
9662       DECL_SECTION_NAME (decl) = build_string (len + 5, string);
9663     }
9664 }
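
/* For example, a function "foo" is placed in section "code@foo" and a
   non-public variable "bar" in "data@bar", while a public variable keeps
   its bare name as the section name.  */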
9665 
9666 /* Switch to an arbitrary section NAME with attributes as specified
9667    by FLAGS.  ALIGN specifies any known alignment requirements for
9668    the section; 0 if the default should be used.  */
9669 
9670 static void
9671 unicosmk_asm_named_section (name, flags)
9672      const char *name;
9673      unsigned int flags;
9674 {
9675   const char *kind;
9676 
9677   /* Close the previous section.  */
9678 
9679   fputs ("\t.endp\n\n", asm_out_file);
9680 
9681   /* Find out what kind of section we are opening.  */
9682 
9683   if (flags & SECTION_MAIN)
9684     fputs ("\t.start\tmain\n", asm_out_file);
9685 
9686   if (flags & SECTION_CODE)
9687     kind = "code";
9688   else if (flags & SECTION_PUBLIC)
9689     kind = "common";
9690   else
9691     kind = "data";
9692 
9693   if (current_section_align != 0)
9694     fprintf (asm_out_file, "\t.psect\t%s,%d,%s\n", name,
9695 	     current_section_align, kind);
9696   else
9697     fprintf (asm_out_file, "\t.psect\t%s,%s\n", name, kind);
9698 }
9699 
9700 static void
9701 unicosmk_insert_attributes (decl, attr_ptr)
9702      tree decl;
9703      tree *attr_ptr ATTRIBUTE_UNUSED;
9704 {
9705   if (DECL_P (decl)
9706       && (TREE_PUBLIC (decl) || TREE_CODE (decl) == FUNCTION_DECL))
9707     unicosmk_unique_section (decl, 0);
9708 }
9709 
9710 /* Output an alignment directive. We have to use the macro 'gcc@code@align'
9711    in code sections because .align fills unused space with zeroes.  */
9712 
9713 void
9714 unicosmk_output_align (file, align)
9715       FILE *file;
9716       int align;
9717 {
9718   if (inside_function)
9719     fprintf (file, "\tgcc@code@align\t%d\n", align);
9720   else
9721     fprintf (file, "\t.align\t%d\n", align);
9722 }
9723 
9724 /* Add a case vector to the current function's list of deferred case
9725    vectors. Case vectors have to be put into a separate section because CAM
9726    does not allow data definitions in code sections.  */
9727 
9728 void
9729 unicosmk_defer_case_vector (lab, vec)
9730       rtx lab;
9731       rtx vec;
9732 {
9733   struct machine_function *machine = cfun->machine;
9734 
9735   vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
9736   machine->addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec,
9737 					  machine->addr_list);
9738 }
9739 
9740 /* Output a case vector.  */
9741 
9742 static void
9743 unicosmk_output_addr_vec (file, vec)
9744       FILE *file;
9745       rtx vec;
9746 {
9747   rtx lab  = XEXP (vec, 0);
9748   rtx body = XEXP (vec, 1);
9749   int vlen = XVECLEN (body, 0);
9750   int idx;
9751 
9752   ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (lab));
9753 
9754   for (idx = 0; idx < vlen; idx++)
9755     {
9756       ASM_OUTPUT_ADDR_VEC_ELT
9757         (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
9758     }
9759 }
9760 
9761 /* Output current function's deferred case vectors.  */
9762 
9763 static void
9764 unicosmk_output_deferred_case_vectors (file)
9765       FILE *file;
9766 {
9767   struct machine_function *machine = cfun->machine;
9768   rtx t;
9769 
9770   if (machine->addr_list == NULL_RTX)
9771     return;
9772 
9773   data_section ();
9774   for (t = machine->addr_list; t; t = XEXP (t, 1))
9775     unicosmk_output_addr_vec (file, XEXP (t, 0));
9776 }
9777 
9778 /* Set up the dynamic subprogram information block (DSIB) and update the
9779    frame pointer register ($15) for subroutines which have a frame. If the
9780    subroutine doesn't have a frame, simply increment $15.  */
9781 
9782 static void
9783 unicosmk_gen_dsib (imaskP)
9784       unsigned long * imaskP;
9785 {
9786   if (alpha_procedure_type == PT_STACK)
9787     {
9788       const char *ssib_name;
9789       rtx mem;
9790 
9791       /* Allocate 64 bytes for the DSIB.  */
9792 
9793       FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
9794                                   GEN_INT (-64))));
9795       emit_insn (gen_blockage ());
9796 
9797       /* Save the return address.  */
9798 
9799       mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 56));
9800       set_mem_alias_set (mem, alpha_sr_alias_set);
9801       FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
9802       (*imaskP) &= ~(1L << REG_RA);
9803 
9804       /* Save the old frame pointer.  */
9805 
9806       mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 48));
9807       set_mem_alias_set (mem, alpha_sr_alias_set);
9808       FRP (emit_move_insn (mem, hard_frame_pointer_rtx));
9809       (*imaskP) &= ~(1L << HARD_FRAME_POINTER_REGNUM);
9810 
9811       emit_insn (gen_blockage ());
9812 
9813       /* Store the SSIB pointer.  */
9814 
9815       ssib_name = ggc_strdup (unicosmk_ssib_name ());
9816       mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 32));
9817       set_mem_alias_set (mem, alpha_sr_alias_set);
9818 
9819       FRP (emit_move_insn (gen_rtx_REG (DImode, 5),
9820                            gen_rtx_SYMBOL_REF (Pmode, ssib_name)));
9821       FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 5)));
9822 
9823       /* Save the CIW index.  */
9824 
9825       mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 24));
9826       set_mem_alias_set (mem, alpha_sr_alias_set);
9827       FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 25)));
9828 
9829       emit_insn (gen_blockage ());
9830 
9831       /* Set the new frame pointer.  */
9832 
9833       FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
9834                                   stack_pointer_rtx, GEN_INT (64))));
9835 
9836     }
9837   else
9838     {
9839       /* Increment the frame pointer register to indicate that we do not
9840          have a frame.  */
9841 
9842       FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
9843                                   hard_frame_pointer_rtx, GEN_INT (1))));
9844     }
9845 }
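
/* DSIB layout as established above, at offsets from the decremented $sp
   (editor's summary): 56 return address, 48 old frame pointer, 32 SSIB
   pointer, 24 CIW index; the new frame pointer then points just past the
   64-byte block.  */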
9846 
9847 #define SSIB_PREFIX "__SSIB_"
9848 #define SSIB_PREFIX_LEN 7
9849 
9850 /* Generate the name of the SSIB section for the current function.  */
9851 
9852 static const char *
9853 unicosmk_ssib_name ()
9854 {
9855   /* A static fixed-size buffer is OK here since CAM cannot deal with
9856      names longer than 255 characters anyway.  */
9857 
9858   static char name[256];
9859 
9860   rtx x;
9861   const char *fnname;
9862   int len;
9863 
9864   x = DECL_RTL (cfun->decl);
9865   if (GET_CODE (x) != MEM)
9866     abort ();
9867   x = XEXP (x, 0);
9868   if (GET_CODE (x) != SYMBOL_REF)
9869     abort ();
9870   fnname = alpha_strip_name_encoding (XSTR (x, 0));
9871 
9872   len = strlen (fnname);
9873   if (len + SSIB_PREFIX_LEN > 255)
9874     len = 255 - SSIB_PREFIX_LEN;
9875 
9876   strcpy (name, SSIB_PREFIX);
9877   strncpy (name + SSIB_PREFIX_LEN, fnname, len);
9878   name[len + SSIB_PREFIX_LEN] = 0;
9879 
9880   return name;
9881 }
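
/* For example, a function "fn" gets the SSIB name "__SSIB_fn"; function
   names longer than 248 characters are truncated so that prefix plus name
   still fit within 255 characters.  */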
9882 
9883 /* Output the static subroutine information block for the current
9884    function.  */
9885 
9886 static void
9887 unicosmk_output_ssib (file, fnname)
9888       FILE *file;
9889       const char *fnname;
9890 {
9891   int len;
9892   int i;
9893   rtx x;
9894   rtx ciw;
9895   struct machine_function *machine = cfun->machine;
9896 
9897   ssib_section ();
9898   fprintf (file, "\t.endp\n\n\t.psect\t%s%s,data\n", user_label_prefix,
9899 	   unicosmk_ssib_name ());
9900 
9901   /* A required header word, with the function name length encoded in it.  */
9902 
9903   len = strlen (fnname);
9904   fprintf (file, "\t.quad\t^X20008%2.2X28\n", len);
9905 
9906   /* Saved registers
9907      ??? We don't do that yet.  */
9908 
9909   fputs ("\t.quad\t0\n", file);
9910 
9911   /* Function address.  */
9912 
9913   fputs ("\t.quad\t", file);
9914   assemble_name (file, fnname);
9915   putc ('\n', file);
9916 
9917   fputs ("\t.quad\t0\n", file);
9918   fputs ("\t.quad\t0\n", file);
9919 
9920   /* Function name.
9921      ??? We do it the same way Cray CC does it but this could be
9922      simplified.  */
9923 
9924   for (i = 0; i < len; i++)
9925     fprintf (file, "\t.byte\t%d\n", (int) fnname[i]);
9926   if ((len % 8) == 0)
9927     fputs ("\t.quad\t0\n", file);
9928   else
9929     fprintf (file, "\t.bits\t%d : 0\n", (8 - (len % 8))*8);
9930 
9931   /* All call information words used in the function.  */
9932 
9933   for (x = machine->first_ciw; x; x = XEXP (x, 1))
9934     {
9935       ciw = XEXP (x, 0);
9936       fprintf (file, "\t.quad\t");
9937 #if HOST_BITS_PER_WIDE_INT == 32
9938       fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
9939 	       CONST_DOUBLE_HIGH (ciw), CONST_DOUBLE_LOW (ciw));
9940 #else
9941       fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (ciw));
9942 #endif
9943       fprintf (file, "\n");
9944     }
9945 }
9946 
9947 /* Add a call information word (CIW) to the list of the current function's
9948    CIWs and return its index.
9949 
9950    X is a CONST_INT or CONST_DOUBLE representing the CIW.  */
9951 
9952 rtx
9953 unicosmk_add_call_info_word (x)
9954       rtx x;
9955 {
9956   rtx node;
9957   struct machine_function *machine = cfun->machine;
9958 
9959   node = gen_rtx_EXPR_LIST (VOIDmode, x, NULL_RTX);
9960   if (machine->first_ciw == NULL_RTX)
9961     machine->first_ciw = node;
9962   else
9963     XEXP (machine->last_ciw, 1) = node;
9964 
9965   machine->last_ciw = node;
9966   ++machine->ciw_count;
9967 
9968   return GEN_INT (machine->ciw_count
9969 		  + strlen (current_function_name)/8 + 5);
9970 }
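
/* Worked example (hypothetical): the first CIW of a function whose name is
   10 characters long gets index 1 + 10/8 + 5 == 7, i.e. the 5 header
   quadwords plus the 2 quadwords holding the padded name in the SSIB laid
   out by unicosmk_output_ssib above.  */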
9971 
9972 static char unicosmk_section_buf[100];
9973 
9974 char *
9975 unicosmk_text_section ()
9976 {
9977   static int count = 0;
9978   sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@text___%d,code",
9979 				 count++);
9980   return unicosmk_section_buf;
9981 }
9982 
9983 char *
9984 unicosmk_data_section ()
9985 {
9986   static int count = 1;
9987   sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@data___%d,data",
9988 				 count++);
9989   return unicosmk_section_buf;
9990 }
9991 
9992 /* The Cray assembler doesn't accept extern declarations for symbols which
9993    are defined in the same file. We have to keep track of all global
9994    symbols which are referenced and/or defined in a source file and output
9995    extern declarations for those which are referenced but not defined at
9996    the end of file.  */
9997 
9998 /* List of identifiers for which an extern declaration might have to be
9999    emitted.  */
10000 
10001 struct unicosmk_extern_list
10002 {
10003   struct unicosmk_extern_list *next;
10004   const char *name;
10005 };
10006 
10007 static struct unicosmk_extern_list *unicosmk_extern_head = 0;
10008 
10009 /* Output extern declarations which are required for every asm file.  */
10010 
10011 static void
10012 unicosmk_output_default_externs (file)
10013 	FILE *file;
10014 {
10015   static const char *const externs[] =
10016     { "__T3E_MISMATCH" };
10017 
10018   int i;
10019   int n;
10020 
10021   n = ARRAY_SIZE (externs);
10022 
10023   for (i = 0; i < n; i++)
10024     fprintf (file, "\t.extern\t%s\n", externs[i]);
10025 }
10026 
10027 /* Output extern declarations for global symbols which have been
10028    referenced but not defined.  */
10029 
10030 static void
10031 unicosmk_output_externs (file)
10032       FILE *file;
10033 {
10034   struct unicosmk_extern_list *p;
10035   const char *real_name;
10036   int len;
10037   tree name_tree;
10038 
10039   len = strlen (user_label_prefix);
10040   for (p = unicosmk_extern_head; p != 0; p = p->next)
10041     {
10042       /* We have to strip the encoding and possibly remove user_label_prefix
10043 	 from the identifier in order to handle -fleading-underscore and
10044 	 explicit asm names correctly (cf. gcc.dg/asm-names-1.c).  */
10045       real_name = alpha_strip_name_encoding (p->name);
10046       if (len && p->name[0] == '*'
10047 	  && !memcmp (real_name, user_label_prefix, len))
10048 	real_name += len;
10049 
10050       name_tree = get_identifier (real_name);
10051       if (! TREE_ASM_WRITTEN (name_tree))
10052 	{
10053 	  TREE_ASM_WRITTEN (name_tree) = 1;
10054 	  fputs ("\t.extern\t", file);
10055 	  assemble_name (file, p->name);
10056 	  putc ('\n', file);
10057 	}
10058     }
10059 }
10060 
10061 /* Record an extern.  */
10062 
10063 void
10064 unicosmk_add_extern (name)
10065      const char *name;
10066 {
10067   struct unicosmk_extern_list *p;
10068 
10069   p = (struct unicosmk_extern_list *)
10070        xmalloc (sizeof (struct unicosmk_extern_list));
10071   p->next = unicosmk_extern_head;
10072   p->name = name;
10073   unicosmk_extern_head = p;
10074 }
10075 
10076 /* The Cray assembler generates incorrect code if identifiers which
10077    conflict with register names are used as instruction operands. We have
10078    to replace such identifiers with DEX expressions.  */
10079 
10080 /* Structure to collect identifiers which have been replaced by DEX
10081    expressions.  */
10082 
10083 struct unicosmk_dex {
10084   struct unicosmk_dex *next;
10085   const char *name;
10086 };
10087 
10088 /* List of identifiers which have been replaced by DEX expressions. The DEX
10089    number is determined by the position in the list.  */
10090 
10091 static struct unicosmk_dex *unicosmk_dex_list = NULL;
10092 
10093 /* The number of elements in the DEX list.  */
10094 
10095 static int unicosmk_dex_count = 0;
10096 
10097 /* Check if NAME must be replaced by a DEX expression.  */
10098 
10099 static int
10100 unicosmk_special_name (name)
10101       const char *name;
10102 {
10103   if (name[0] == '*')
10104     ++name;
10105 
10106   if (name[0] == '$')
10107     ++name;
10108 
10109   if (name[0] != 'r' && name[0] != 'f' && name[0] != 'R' && name[0] != 'F')
10110     return 0;
10111 
10112   switch (name[1])
10113     {
10114     case '1':  case '2':
10115       return (name[2] == '\0' || (ISDIGIT (name[2]) && name[3] == '\0'));
10116 
10117     case '3':
10118       return (name[2] == '\0'
10119 	       || ((name[2] == '0' || name[2] == '1') && name[3] == '\0'));
10120 
10121     default:
10122       return (ISDIGIT (name[1]) && name[2] == '\0');
10123     }
10124 }
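
/* For example, "r5", "$f31" and "R30" are treated as register names by the
   test above and must be replaced, while "r32", "f310" and "reg" are
   not.  */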
10125 
10126 /* Return the DEX number if X must be replaced by a DEX expression and 0
10127    otherwise.  */
10128 
10129 static int
10130 unicosmk_need_dex (x)
10131       rtx x;
10132 {
10133   struct unicosmk_dex *dex;
10134   const char *name;
10135   int i;
10136 
10137   if (GET_CODE (x) != SYMBOL_REF)
10138     return 0;
10139 
10140   name = XSTR (x,0);
10141   if (! unicosmk_special_name (name))
10142     return 0;
10143 
10144   i = unicosmk_dex_count;
10145   for (dex = unicosmk_dex_list; dex; dex = dex->next)
10146     {
10147       if (! strcmp (name, dex->name))
10148         return i;
10149       --i;
10150     }
10151 
10152   dex = (struct unicosmk_dex *) xmalloc (sizeof (struct unicosmk_dex));
10153   dex->name = name;
10154   dex->next = unicosmk_dex_list;
10155   unicosmk_dex_list = dex;
10156 
10157   ++unicosmk_dex_count;
10158   return unicosmk_dex_count;
10159 }
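
/* For example (editor's sketch): interning "r5" and then "f2" assigns them
   DEX numbers 1 and 2.  A later lookup of "r5" starts i at
   unicosmk_dex_count == 2 and decrements past "f2" (the list is LIFO), so
   it again returns 1.  */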
10160 
10161 /* Output the DEX definitions for this file.  */
10162 
10163 static void
10164 unicosmk_output_dex (file)
10165       FILE *file;
10166 {
10167   struct unicosmk_dex *dex;
10168   int i;
10169 
10170   if (unicosmk_dex_list == NULL)
10171     return;
10172 
10173   fprintf (file, "\t.dexstart\n");
10174 
10175   i = unicosmk_dex_count;
10176   for (dex = unicosmk_dex_list; dex; dex = dex->next)
10177     {
10178       fprintf (file, "\tDEX (%d) = ", i);
10179       assemble_name (file, dex->name);
10180       putc ('\n', file);
10181       --i;
10182     }
10183 
10184   fprintf (file, "\t.dexend\n");
10185 }
10186 
10187 #else
10188 
10189 static void
10190 unicosmk_output_deferred_case_vectors (file)
10191       FILE *file ATTRIBUTE_UNUSED;
10192 {}
10193 
10194 static void
10195 unicosmk_gen_dsib (imaskP)
10196       unsigned long * imaskP ATTRIBUTE_UNUSED;
10197 {}
10198 
10199 static void
10200 unicosmk_output_ssib (file, fnname)
10201       FILE * file ATTRIBUTE_UNUSED;
10202       const char * fnname ATTRIBUTE_UNUSED;
10203 {}
10204 
10205 rtx
10206 unicosmk_add_call_info_word (x)
10207      rtx x ATTRIBUTE_UNUSED;
10208 {
10209   return NULL_RTX;
10210 }
10211 
10212 static int
10213 unicosmk_need_dex (x)
10214       rtx x ATTRIBUTE_UNUSED;
10215 {
10216   return 0;
10217 }
10218 
10219 #endif /* TARGET_ABI_UNICOSMK */
10220 
10221 #include "gt-alpha.h"
10222 
10223