1 /* Subroutines for insn-output.c for VAX.
2    Copyright (C) 1987-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #define IN_TARGET_CODE 1
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "stringpool.h"
30 #include "attribs.h"
31 #include "df.h"
32 #include "memmodel.h"
33 #include "tm_p.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "emit-rtl.h"
37 #include "calls.h"
38 #include "varasm.h"
39 #include "conditions.h"
40 #include "output.h"
41 #include "expr.h"
42 #include "reload.h"
43 #include "builtins.h"
44 
45 /* This file should be included last.  */
46 #include "target-def.h"
47 
48 static void vax_option_override (void);
49 static bool vax_legitimate_address_p (machine_mode, rtx, bool);
50 static void vax_file_start (void);
51 static void vax_init_libfuncs (void);
52 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
53 				 HOST_WIDE_INT, tree);
54 static int vax_address_cost_1 (rtx);
55 static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
56 static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
57 static rtx vax_function_arg (cumulative_args_t, const function_arg_info &);
58 static void vax_function_arg_advance (cumulative_args_t,
59 				      const function_arg_info &);
60 static rtx vax_struct_value_rtx (tree, int);
61 static void vax_asm_trampoline_template (FILE *);
62 static void vax_trampoline_init (rtx, tree, rtx);
63 static poly_int64 vax_return_pops_args (tree, tree, poly_int64);
64 static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
65 static HOST_WIDE_INT vax_starting_frame_offset (void);
66 
67 /* Initialize the GCC target structure.  */
68 #undef TARGET_ASM_ALIGNED_HI_OP
69 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
70 
71 #undef TARGET_ASM_FILE_START
72 #define TARGET_ASM_FILE_START vax_file_start
73 #undef TARGET_ASM_FILE_START_APP_OFF
74 #define TARGET_ASM_FILE_START_APP_OFF true
75 
76 #undef TARGET_INIT_LIBFUNCS
77 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
78 
79 #undef TARGET_ASM_OUTPUT_MI_THUNK
80 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
81 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
82 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
83 
84 #undef TARGET_RTX_COSTS
85 #define TARGET_RTX_COSTS vax_rtx_costs
86 #undef TARGET_ADDRESS_COST
87 #define TARGET_ADDRESS_COST vax_address_cost
88 
89 #undef TARGET_PROMOTE_PROTOTYPES
90 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
91 
92 #undef TARGET_FUNCTION_ARG
93 #define TARGET_FUNCTION_ARG vax_function_arg
94 #undef TARGET_FUNCTION_ARG_ADVANCE
95 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
96 
97 #undef TARGET_STRUCT_VALUE_RTX
98 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
99 
100 #undef TARGET_LRA_P
101 #define TARGET_LRA_P hook_bool_void_false
102 
103 #undef TARGET_LEGITIMATE_ADDRESS_P
104 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
105 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
106 #define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p
107 
108 #undef TARGET_FRAME_POINTER_REQUIRED
109 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
110 
111 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
112 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
113 #undef TARGET_TRAMPOLINE_INIT
114 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
115 #undef TARGET_RETURN_POPS_ARGS
116 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
117 
118 #undef TARGET_OPTION_OVERRIDE
119 #define TARGET_OPTION_OVERRIDE vax_option_override
120 
121 #if TARGET_ELF
122 #undef TARGET_BINDS_LOCAL_P
123 #define TARGET_BINDS_LOCAL_P vax_elf_binds_local_p
124 
125 static bool
vax_elf_binds_local_p(const_tree exp)126 vax_elf_binds_local_p (const_tree exp)
127 {
128   return default_binds_local_p_3 (exp, (flag_shlib | flag_pic) != 0,
129 				  true, false, false);
130 }
131 #endif
132 
133 #undef TARGET_STARTING_FRAME_OFFSET
134 #define TARGET_STARTING_FRAME_OFFSET vax_starting_frame_offset
135 
136 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
137 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
138 
139 struct gcc_target targetm = TARGET_INITIALIZER;
140 
141 /* Set global variables as needed for the options enabled.  */
142 
143 static void
vax_option_override(void)144 vax_option_override (void)
145 {
146   /* We're VAX floating point, not IEEE floating point.  */
147   if (TARGET_G_FLOAT)
148     REAL_MODE_FORMAT (DFmode) = &vax_g_format;
149 
150   flag_dwarf2_cfi_asm = 0;
151 
152 #ifdef SUBTARGET_OVERRIDE_OPTIONS
153   SUBTARGET_OVERRIDE_OPTIONS;
154 #endif
155 }
156 
157 static void
vax_add_reg_cfa_offset(rtx insn,int offset,rtx src)158 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
159 {
160   rtx x;
161 
162   x = plus_constant (Pmode, frame_pointer_rtx, offset);
163   x = gen_rtx_MEM (SImode, x);
164   x = gen_rtx_SET (x, src);
165   add_reg_note (insn, REG_CFA_OFFSET, x);
166 }
167 
168 /* Generate the assembly code for function entry.  FILE is a stdio
169    stream to output the code to.  SIZE is an int: how many units of
170    temporary storage to allocate.
171 
172    Refer to the array `regs_ever_live' to determine which registers to
173    save; `regs_ever_live[I]' is nonzero if register number I is ever
174    used in the function.  This function is responsible for knowing
175    which registers should not be saved even if used.  */
176 
void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;			/* procedure entry mask: bit N => save rN */
  HOST_WIDE_INT size;
  rtx insn;

  /* 20 bytes cover the fixed part of the CALLS frame (zero, PSW, old
     AP, old FP, return address -- see the layout comment below); the
     mask-saved registers follow at 4-byte intervals.  */
  offset = 20;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
      {
        mask |= 1 << regno;
        offset += 4;
      }

  /* The CALLS/CALLG instruction saves the registers named in the entry
     mask word that heads every function; emit that word.  */
  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is as follows:

		<- CFA, AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  /* The CFA sits above all the mask-saved registers, OFFSET bytes from
     the frame pointer.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
                plus_constant (Pmode, frame_pointer_rtx, offset));
  insn = emit_insn (gen_blockage ());
  RTX_FRAME_RELATED_P (insn) = 1;

#ifdef notyet
  /*
   * We can't do this, the dwarf code asserts and we don't have yet a
   * way to get to the psw
   */
  vax_add_reg_cfa_offset (insn, 4, gen_rtx_REG (Pmode, PSW_REGNUM));
#endif
  /* Record where CALLS stored the old AP, FP and return address so the
     unwinder can restore them.  */
  vax_add_reg_cfa_offset (insn, 8, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 16, pc_rtx);

  /* Record the save slot of each register in MASK, mirroring the
     offset computation of the first loop above.  */
  offset = 20;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= vax_starting_frame_offset ();
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
249 
250 /* When debugging with stabs, we want to output an extra dummy label
251    so that gas can distinguish between D_float and G_float prior to
252    processing the .stabs directive identifying type double.  */
253 static void
vax_file_start(void)254 vax_file_start (void)
255 {
256   default_file_start ();
257 
258   if (write_symbols == DBX_DEBUG)
259     fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
260 }
261 
262 /* We can use the BSD C library routines for the libgcc calls that are
263    still generated, since that's what they boil down to anyways.  When
264    ELF, avoid the user's namespace.  */
265 
266 static void
vax_init_libfuncs(void)267 vax_init_libfuncs (void)
268 {
269   if (TARGET_BSD_DIVMOD)
270     {
271       set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
272       set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
273     }
274 }
275 
/* Split the N DImode operands OPERANDS[0..N-1] of INSN (an insn whose
   operation code is CODE) into SImode word pairs: the low words go
   into LOW[0..N-1] and OPERANDS[0..N-1] are replaced by the high
   words.  (The previous comment about nonimmediate_operand described a
   different function and was misplaced here.)  */
277 
/* Split the N DImode operands OPERANDS[] of INSN (operation CODE) into
   SImode halves: low words into LOW[], high words back into OPERANDS[].  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  /* An auto-increment/decrement address steps by itself, so
	     both halves use the same SImode reference; the side effect
	     advances the pointer between the two word accesses.  */
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  /* The address register dies in this insn, so when optimizing
	     for size we may clobber it: access the low word through
	     (reg)+ so that the register then points at the high word.
	     NOTE(review): this assumes the low-word access is emitted
	     before the high-word one -- confirm against the users of
	     this function.  */
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  /* General case: take the two word-sized subwords.  VAX is
	     little-endian, so subword 0 is the low word.  */
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
314 
/* Print to FILE the VAX assembler form of address ADDR: a register,
   auto-increment/decrement, indirect MEM, symbolic constant, or a PLUS
   combining a base register, an index (REG or MULT) and an offset.  */

void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;		/* Preserved only for diagnostics.  */
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      /* Indirection: '*' prefix, then decode the inner address.  */
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT of a REG and an appropriate
	 constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
	 a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      /* Classify the first summand; ADDR is left pointing at the rest.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	{
	   debug_rtx (orig);
	   gcc_unreachable ();
	}

      /* ADDR now holds the remaining summand(s): a REG, a MULT, or a
	 nested PLUS carrying the last two parts.  */
      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT)
	ireg = addr;
      else if (GET_CODE (addr) == PLUS)
	{
	  /* Classify the two parts of the inner PLUS the same way.  */
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
		                            INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      /* NOTE(review): the combined offset computed just above is
		 immediately overwritten here, making those plus_constant
		 calls dead stores.  Possibly this assignment was meant to
		 be an else branch -- confirm before changing; two offsets
		 may be unreachable in canonical RTL.  */
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else if (GET_CODE (XEXP (addr, 0)) == MULT && !ireg)
	    {
	      ireg = XEXP (addr, 0);
	    }
	  else
	    {
	      debug_rtx (orig);
	      gcc_unreachable ();
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      /* NOTE(review): same dead-store pattern as above.  */
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else if (GET_CODE (XEXP (addr, 1)) == MULT && !ireg)
	    {
	      ireg = XEXP (addr, 1);
	    }
	  else
	    {
	      debug_rtx (orig);
	      gcc_unreachable ();
	    }
	}
      else
	{
	  debug_rtx (orig);
	  gcc_unreachable ();
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      /* Only one index register is possible.  */
	      if (ireg)
		{
		  debug_rtx (orig);
		  gcc_unreachable ();
		}
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (VOIDmode, offset);
	}

      /* Emit the base register in (reg) form, then the index in
	 [reg] form.  */
      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  /* For a MULT index the scale is implied by the operand size;
	     only the register is printed.  */
	  if (GET_CODE (ireg) == MULT)
	    ireg = XEXP (ireg, 0);
	  if (! REG_P (ireg))
	    {
	      debug_rtx (orig);
	      output_operand_lossage ("non-register index expression");
	    }
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      /* Anything else must be a constant address.  */
      gcc_assert (! REG_P(addr));
      output_addr_const (file, addr);
    }
}
550 
/* Print operand X to FILE, modified by the format letter CODE:
   '#'  the D/G double-precision suffix character
   '|'  the assembler register prefix
   'c'  condition name for comparison X; 'C' the reversed condition
   'D'  a negative constant, printed in hexadecimal
   'P'  constant plus 1;  'N' one's complement;  'R' 32 minus constant
   'H','B'  one's complement truncated to 16/8 bits
   'h','b'  negation truncated to 16/8 bits
   'M'  mask with the low X bits set
   'x'  constant in hexadecimal.  */

void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  /* No (matching) code letter: print the operand by its own shape.  */
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (GET_MODE (x), XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      /* Single-precision float literal: 0f prefix.  */
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      /* Double-precision literal: 0d or 0g depending on float format.  */
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else if (GET_CODE (x) == SUBREG)
    {
      debug_rtx (x);
      output_operand_lossage ("SUBREG operand");
    }
  else
    {
      /* Remaining constants become immediate operands.  */
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
617 
618 const char *
cond_name(rtx op)619 cond_name (rtx op)
620 {
621   switch (GET_CODE (op))
622     {
623     case NE:
624       return "neq";
625     case EQ:
626       return "eql";
627     case GE:
628       return "geq";
629     case GT:
630       return "gtr";
631     case LE:
632       return "leq";
633     case LT:
634       return "lss";
635     case GEU:
636       return "gequ";
637     case GTU:
638       return "gtru";
639     case LEU:
640       return "lequ";
641     case LTU:
642       return "lssu";
643 
644     default:
645       gcc_unreachable ();
646     }
647 }
648 
649 const char *
rev_cond_name(rtx op)650 rev_cond_name (rtx op)
651 {
652   switch (GET_CODE (op))
653     {
654     case EQ:
655       return "neq";
656     case NE:
657       return "eql";
658     case LT:
659       return "geq";
660     case LE:
661       return "gtr";
662     case GT:
663       return "leq";
664     case GE:
665       return "lss";
666     case LTU:
667       return "gequ";
668     case LEU:
669       return "gtru";
670     case GTU:
671       return "lequ";
672     case GEU:
673       return "lssu";
674 
675     default:
676       gcc_unreachable ();
677     }
678 }
679 
680 static bool
vax_float_literal(rtx c)681 vax_float_literal (rtx c)
682 {
683   machine_mode mode;
684   const REAL_VALUE_TYPE *r;
685   REAL_VALUE_TYPE s;
686   int i;
687 
688   if (GET_CODE (c) != CONST_DOUBLE)
689     return false;
690 
691   mode = GET_MODE (c);
692 
693   if (c == const_tiny_rtx[(int) mode][0]
694       || c == const_tiny_rtx[(int) mode][1]
695       || c == const_tiny_rtx[(int) mode][2])
696     return true;
697 
698   r = CONST_DOUBLE_REAL_VALUE (c);
699 
700   for (i = 0; i < 7; i++)
701     {
702       int x = 1 << i;
703       bool ok;
704       real_from_integer (&s, mode, x, SIGNED);
705 
706       if (real_equal (r, &s))
707 	return true;
708       ok = exact_real_inverse (mode, &s);
709       gcc_assert (ok);
710       if (real_equal (r, &s))
711 	return true;
712     }
713   return false;
714 }
715 
716 
717 /* Return the cost in cycles of a memory address, relative to register
718    indirect.
719 
720    Each of the following adds the indicated number of cycles:
721 
722    1 - symbolic address
723    1 - pre-decrement
724    1 - indexing and/or offset(register)
725    2 - indirect */
726 
727 
/* Estimate the cycle cost of address ADDR relative to register
   indirect, per the table in the comment above.  The summands of a
   PLUS are processed one at a time via the RESTART loop, accumulating
   component flags which are combined at the end.  */

static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle);
	 only offsets outside [-128, 128) cost a cycle.  */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      /* Stash the first operand for a later RESTART pass and continue
	 with the second; a nested PLUS uses the second stash slot.  */
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
796 
797 static int
vax_address_cost(rtx x,machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,bool speed ATTRIBUTE_UNUSED)798 vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
799 		  addr_space_t as ATTRIBUTE_UNUSED,
800 		  bool speed ATTRIBUTE_UNUSED)
801 {
802   return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
803 }
804 
805 /* Cost of an expression on a VAX.  This version has costs tuned for the
806    CVAX chip (found in the VAX 3 series) with comments for variations on
807    other models.
808 
809    FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
810    and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
811    costs on a per cpu basis.  */
812 
/* Implement TARGET_RTX_COSTS.  Set *TOTAL to the CVAX cycle estimate
   for expression X (code OUTER_CODE encloses it) and return true when
   the operands have been fully accounted for, false to let the caller
   recurse.  After the switch, operands selected by FMT/I add their own
   cost on top of *TOTAL.  */

static bool
vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
	       int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = 0;
	  return true;
	}
      if (outer_code == AND)
	{
	  /* AND complements its constant; cheap if the complement fits
	     a 6-bit literal.  */
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = 1;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	*total = vax_float_literal (x) ? 5 : 8;
      else
	/* A DImode constant is cheap if it fits the 6-bit literal
	   format, directly or negated in a PLUS context.  */
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
		 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case E_DFmode:
	  *total = 16;		/* 4 on VAX 9000 */
	  break;
	case E_SFmode:
	  *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case E_DImode:
	  *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case E_SImode:
	case E_HImode:
	case E_QImode:
	  *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
	*total = 30;		/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = 24;
      else
	*total = 11;		/* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 29;
      break;

    case FLOAT:
      /* Converting to DFmode or from a non-SImode source costs extra.  */
      *total = (6		/* 4 on VAX 9000 */
		+ (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;		/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = 12;
      else
	*total = 10;		/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e"; 		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = 4;
	  /* Skip the constant operand: only cost operand 1 below.  */
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = 9;
      else if (mode == SFmode)
	*total = 6;
      else if (mode == DImode)
	*total = 4;
      else
	*total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = 5;		/* 7 on VAX 2 */
      else
	*total = 3;		/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;		/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && mode != QImode)
	    *total += 1;	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += 1;		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += 1;
	      else
		*total += (GET_MODE (x) == DFmode) ? 3 : 2;
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += 2;
	    }
	  break;
	case MEM:
	  *total += 1;		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += vax_address_cost_1 (XEXP (op, 0));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += 1;
	  break;
	}
    }
  return true;
}
1086 
1087 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1088    Used for C++ multiple inheritance.
1089 	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
1090 	addl2	$DELTA, 4(ap)	#adjust first argument
1091 	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
1092 */
1093 
static void
vax_output_mi_thunk (FILE * file,
		     tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));

  assemble_start_function (thunk, fnname);
  /* 0x0ffc is the conservative entry mask saving r2-r11; then add
     DELTA to the first stacked argument at 4(ap).  */
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  /* Jump two bytes past FUNCTION's label to skip its entry mask.  */
  fprintf (file, "\tjmp ");
  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
  fprintf (file, "+2\n");
  assemble_end_function (thunk, fnname);
}
1111 
/* Worker for TARGET_STRUCT_VALUE_RTX: the address of a returned
   aggregate is always passed in a fixed register, both for incoming
   and outgoing calls.  */

static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}
1118 
1119 /* Worker function for NOTICE_UPDATE_CC.  */
1120 
/* Record in cc_status how INSN's body EXP affects the condition codes,
   so later branches can reuse flags instead of emitting a compare.
   CC_STATUS_INIT means "flags unknown"; otherwise value1/value2 hold
   the rtx whose comparison against 0 the flags reflect.  */

void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      if (GET_CODE (SET_SRC (exp)) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
	       && GET_CODE (SET_DEST (exp)) != PC)
	{
	  cc_status.flags = 0;
	  /* The integer operations below don't set carry or
	     set it in an incompatible way.  That's ok though
	     as the Z bit is all we need when doing unsigned
	     comparisons on the result of these insns (since
	     they're always with 0).  Set CC_NO_OVERFLOW to
	     generate the correct unsigned branches.  */
	  switch (GET_CODE (SET_SRC (exp)))
	    {
	    case NEG:
	      /* Float negation does set the flags compatibly, so only
		 the integer case falls through to CC_NO_OVERFLOW.  */
	      if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
		break;
	      /* FALLTHRU */
	    case AND:
	    case IOR:
	    case XOR:
	    case NOT:
	    case CTZ:
	    case MEM:
	    case REG:
	      cc_status.flags = CC_NO_OVERFLOW;
	      break;
	    default:
	      break;
	    }
	  cc_status.value1 = SET_DEST (exp);
	  cc_status.value2 = SET_SRC (exp);
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
	{
	  cc_status.flags = 0;
	  cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
	  cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
	}
      else
	/* PARALLELs whose first element sets the PC are aob,
	   sob insns.  They do change the cc's.  */
	CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  /* Drop value2 when value1 overlaps it: writing value1 invalidated
     the recorded source.  */
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  /* Conservatively drop value2 for MEM/MEM pairs as well.  */
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}
1189 
1190 /* Output integer move instructions.  */
1191 
const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
		     machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;

  switch (mode)
    {
    case E_DImode:
      if (operands[1] == const0_rtx)
	return "clrq %0";
      /* When optimizing for size, try to encode a 64-bit constant more
	 compactly than a full 8-byte movq immediate.  */
      if (TARGET_QMATH && optimize_size
	  && (CONST_INT_P (operands[1])
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  unsigned HOST_WIDE_INT hval, lval;
	  int n;

	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

	      /* Make sure only the low 32 bits are valid.  */
	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
	    }
	  else
	    {
	      lval = INTVAL (operands[1]);
	      hval = 0;
	    }

	  /* See whether the 64-bit value is really a 6-bit (short
	     literal) value shifted left by some amount.  If so, ashq
	     can shift it into place, saving 7 bytes (1 addr-mode byte +
	     8 bytes - 1 shift byte - 1 short literal byte).  */
	  if (lval != 0
	      && (n = exact_log2 (lval & (- lval))) != -1
	      && (lval >> n) < 64)
	    {
	      lval >>= n;

	      /* On 32bit platforms, if the 6bits didn't overflow into the
		 upper 32bit value that value better be 0.  If we have
		 overflowed, make sure it wasn't too much.  */
	      if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
		{
		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
		    n = 0;	/* failure */
		  else
		    lval |= hval << (32 - n);
		}
	      /*  If n is 0, then ashq is not the best way to emit this.  */
	      if (n > 0)
		{
		  operands[1] = GEN_INT (lval);
		  operands[2] = GEN_INT (n);
		  return "ashq %2,%D1,%0";
		}
#if HOST_BITS_PER_WIDE_INT == 32
	    }
	  /* On 32bit platforms, if the low 32bit value is 0, checkout the
	     upper 32bit value.  */
	  else if (hval != 0
		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
		   && (hval >> n) < 64)
	    {
	      operands[1] = GEN_INT (hval >> n);
	      operands[2] = GEN_INT (n + 32);
	      return "ashq %2,%D1,%0";
#endif
	    }
	}

      /* Try splitting a constant DImode move into two SImode moves,
	 then recombining into movq when that is shorter.  */
      if (TARGET_QMATH
	  && (!MEM_P (operands[0])
	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
	  && ((CONST_INT_P (operands[1])
	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  hi[0] = operands[0];
	  hi[1] = operands[1];

	  /* NOTE(review): INSN is used here despite being marked
	     ATTRIBUTE_UNUSED in the parameter list.  */
	  split_quadword_operands (insn, SET, hi, lo, 2);

	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
	  pattern_hi = vax_output_int_move (NULL, hi, SImode);

	  /* If the halves are plain movl/movl or pushl/pushl, a single
	     movq is shorter: 1 opcode byte + 1 addr-mode byte + 8
	     immediate bytes vs. 2 opcode bytes + 2 addr-mode bytes + 8
	     immediate bytes.  */
	  if ((!strncmp (pattern_lo, "movl", 4)
	      && !strncmp (pattern_hi, "movl", 4))
	      || (!strncmp (pattern_lo, "pushl", 5)
		  && !strncmp (pattern_hi, "pushl", 5)))
	    return "movq %1,%0";

	  /* For a pre-decrement destination the high half must be
	     stored first so the words end up in the right order.  */
	  if (MEM_P (operands[0])
	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
	    {
	      output_asm_insn (pattern_hi, hi);
	      operands[0] = lo[0];
	      operands[1] = lo[1];
	      operands[2] = lo[2];
	      return pattern_lo;
	    }
	  else
	    {
	      output_asm_insn (pattern_lo, lo);
	      operands[0] = hi[0];
	      operands[1] = hi[1];
	      operands[2] = hi[2];
	      return pattern_hi;
	    }
	}
      return "movq %1,%0";

    case E_SImode:
      if (symbolic_operand (operands[1], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a1";
	  return "movab %a1,%0";
	}

      if (operands[1] == const0_rtx)
	{
	  if (push_operand (operands[0], SImode))
	    return "pushl %1";
	  return "clrl %0";
	}

      /* Constants >= 64 don't fit a short literal; try complement,
	 zero/sign extension, or a shift of a short literal first.  */
      if (CONST_INT_P (operands[1])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  int n;
	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
	    return "mcoml %N1,%0";
	  if ((unsigned HOST_WIDE_INT)i < 0x100)
	    return "movzbl %1,%0";
	  if (i >= -0x80 && i < 0)
	    return "cvtbl %1,%0";
	  if (optimize_size
	      && (n = exact_log2 (i & (-i))) != -1
	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
	    {
	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
	      operands[2] = GEN_INT (n);
	      return "ashl %2,%1,%0";
	    }
	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
	    return "movzwl %1,%0";
	  if (i >= -0x8000 && i < 0)
	    return "cvtwl %1,%0";
	}
      if (push_operand (operands[0], SImode))
	return "pushl %1";
      return "movl %1,%0";

    case E_HImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrw %0";
	  else if ((unsigned HOST_WIDE_INT)i < 64)
	    return "movw %1,%0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomw %H1,%0";
	  else if ((unsigned HOST_WIDE_INT)i < 256)
	    return "movzbw %1,%0";
	  else if (i >= -0x80 && i < 0)
	    return "cvtbw %1,%0";
	}
      return "movw %1,%0";

    case E_QImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrb %0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomb %B1,%0";
	}
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}
1389 
1390 /* Output integer add instructions.
1391 
1392    The space-time-opcode tradeoffs for addition vary by model of VAX.
1393 
   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.
1396 
1397    "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1398    faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1399    a register is used in an address too soon after it is set.
1400    Compromise by using movab only when it is shorter than the add
1401    or the base register in the address is one of sp, ap, and fp,
1402    which are not modified very often.  */
1403 
1404 const char *
vax_output_int_add(rtx_insn * insn,rtx * operands,machine_mode mode)1405 vax_output_int_add (rtx_insn *insn, rtx *operands, machine_mode mode)
1406 {
1407   switch (mode)
1408     {
1409     case E_DImode:
1410       {
1411 	rtx low[3];
1412 	const char *pattern;
1413 	int carry = 1;
1414 	bool sub;
1415 
1416 	if (TARGET_QMATH && 0)
1417 	  debug_rtx (insn);
1418 
1419 	split_quadword_operands (insn, PLUS, operands, low, 3);
1420 
1421 	if (TARGET_QMATH)
1422 	  {
1423 	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
1424 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1425 	    gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1426 	    gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1427 #endif
1428 
1429 	    /* No reason to add a 0 to the low part and thus no carry, so just
1430 	       emit the appropriate add/sub instruction.  */
1431 	    if (low[2] == const0_rtx)
1432 	      return vax_output_int_add (NULL, operands, SImode);
1433 
1434 	    /* Are we doing addition or subtraction?  */
1435 	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1436 
1437 	    /* We can't use vax_output_int_add since some the patterns don't
1438 	       modify the carry bit.  */
1439 	    if (sub)
1440 	      {
1441 		if (low[2] == constm1_rtx)
1442 		  pattern = "decl %0";
1443 		else
1444 		  pattern = "subl2 $%n2,%0";
1445 	      }
1446 	    else
1447 	      {
1448 		if (low[2] == const1_rtx)
1449 		  pattern = "incl %0";
1450 		else
1451 		  pattern = "addl2 %2,%0";
1452 	      }
1453 	    output_asm_insn (pattern, low);
1454 
1455 	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
1456 	       two 32bit parts, we complement each and then add one to
1457 	       low part.  We know that the low part can't overflow since
1458 	       it's value can never be 0.  */
1459 	    if (sub)
1460 		return "sbwc %N2,%0";
1461 	    return "adwc %2,%0";
1462 	  }
1463 
1464 	/* Add low parts.  */
1465 	if (rtx_equal_p (operands[0], operands[1]))
1466 	  {
1467 	    if (low[2] == const0_rtx)
1468 	/* Should examine operand, punt if not POST_INC.  */
1469 	      pattern = "tstl %0", carry = 0;
1470 	    else if (low[2] == const1_rtx)
1471 	      pattern = "incl %0";
1472 	    else
1473 	      pattern = "addl2 %2,%0";
1474 	  }
1475 	else
1476 	  {
1477 	    if (low[2] == const0_rtx)
1478 	      pattern = "movl %1,%0", carry = 0;
1479 	    else
1480 	      pattern = "addl3 %2,%1,%0";
1481 	  }
1482 	if (pattern)
1483 	  output_asm_insn (pattern, low);
1484 	if (!carry)
1485 	  /* If CARRY is 0, we don't have any carry value to worry about.  */
1486 	  return get_insn_template (CODE_FOR_addsi3, insn);
1487 	/* %0 = C + %1 + %2 */
1488 	if (!rtx_equal_p (operands[0], operands[1]))
1489 	  output_asm_insn ((operands[1] == const0_rtx
1490 			    ? "clrl %0"
1491 			    : "movl %1,%0"), operands);
1492 	return "adwc %2,%0";
1493       }
1494 
1495     case E_SImode:
1496       if (rtx_equal_p (operands[0], operands[1]))
1497 	{
1498 	  if (operands[2] == const1_rtx)
1499 	    return "incl %0";
1500 	  if (operands[2] == constm1_rtx)
1501 	    return "decl %0";
1502 	  if (CONST_INT_P (operands[2])
1503 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1504 	    return "subl2 $%n2,%0";
1505 	  if (CONST_INT_P (operands[2])
1506 	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1507 	      && REG_P (operands[1])
1508 	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1509 		   || REGNO (operands[1]) > 11))
1510 	    return "movab %c2(%1),%0";
1511 	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1512 	    return "movab %a2[%0],%0";
1513 	  return "addl2 %2,%0";
1514 	}
1515 
1516       if (rtx_equal_p (operands[0], operands[2]))
1517 	{
1518 	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1519 	    return "movab %a1[%0],%0";
1520 	  return "addl2 %1,%0";
1521 	}
1522 
1523       if (CONST_INT_P (operands[2])
1524 	  && INTVAL (operands[2]) < 32767
1525 	  && INTVAL (operands[2]) > -32768
1526 	  && REG_P (operands[1])
1527 	  && push_operand (operands[0], SImode))
1528 	return "pushab %c2(%1)";
1529 
1530       if (CONST_INT_P (operands[2])
1531 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1532 	return "subl3 $%n2,%1,%0";
1533 
1534       if (CONST_INT_P (operands[2])
1535 	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1536 	  && REG_P (operands[1])
1537 	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1538 	       || REGNO (operands[1]) > 11))
1539 	return "movab %c2(%1),%0";
1540 
1541       /* Add this if using gcc on a VAX 3xxx:
1542       if (REG_P (operands[1]) && REG_P (operands[2]))
1543 	return "movab (%1)[%2],%0";
1544       */
1545 
1546       if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1547 	{
1548 	  if (push_operand (operands[0], SImode))
1549 	    return "pushab %a2[%1]";
1550 	  return "movab %a2[%1],%0";
1551 	}
1552 
1553       if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1554 	{
1555 	  if (push_operand (operands[0], SImode))
1556 	    return "pushab %a1[%2]";
1557 	  return "movab %a1[%2],%0";
1558 	}
1559 
1560       if (flag_pic && REG_P (operands[0])
1561 	  && symbolic_operand (operands[2], SImode))
1562 	return "movab %a2,%0;addl2 %1,%0";
1563 
1564       if (flag_pic
1565 	  && (symbolic_operand (operands[1], SImode)
1566 	      || symbolic_operand (operands[1], SImode)))
1567 	debug_rtx (insn);
1568 
1569       return "addl3 %1,%2,%0";
1570 
1571     case E_HImode:
1572       if (rtx_equal_p (operands[0], operands[1]))
1573 	{
1574 	  if (operands[2] == const1_rtx)
1575 	    return "incw %0";
1576 	  if (operands[2] == constm1_rtx)
1577 	    return "decw %0";
1578 	  if (CONST_INT_P (operands[2])
1579 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1580 	    return "subw2 $%n2,%0";
1581 	  return "addw2 %2,%0";
1582 	}
1583       if (rtx_equal_p (operands[0], operands[2]))
1584 	return "addw2 %1,%0";
1585       if (CONST_INT_P (operands[2])
1586 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1587 	return "subw3 $%n2,%1,%0";
1588       return "addw3 %1,%2,%0";
1589 
1590     case E_QImode:
1591       if (rtx_equal_p (operands[0], operands[1]))
1592 	{
1593 	  if (operands[2] == const1_rtx)
1594 	    return "incb %0";
1595 	  if (operands[2] == constm1_rtx)
1596 	    return "decb %0";
1597 	  if (CONST_INT_P (operands[2])
1598 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1599 	    return "subb2 $%n2,%0";
1600 	  return "addb2 %2,%0";
1601 	}
1602       if (rtx_equal_p (operands[0], operands[2]))
1603 	return "addb2 %1,%0";
1604       if (CONST_INT_P (operands[2])
1605 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1606 	return "subb3 $%n2,%1,%0";
1607       return "addb3 %1,%2,%0";
1608 
1609     default:
1610       gcc_unreachable ();
1611     }
1612 }
1613 
/* Output integer subtract instructions for MODE (only DImode is
   handled).  Like vax_output_int_add, leading instructions are emitted
   with output_asm_insn and the final template is returned; the DImode
   subtract is a low-word subl followed by sbwc on the high word.  */

const char *
vax_output_int_subtract (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      /* Subtracting 0 from the low word leaves the borrow clear,
		 so no instruction and no carry to propagate.  */
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
  }
}
1688 
/* Build a MEM of mode MODE addressing BASE displaced by OFF bytes,
   folding any constant parts found inside BASE into the displacement.
   CODE selects the final address form: POST_INC gives (mem (post_inc
   base)); REG gives a plain register indirection when BASE is a
   register; otherwise a (plus base off) displacement is used when OFF
   is non-zero.  Helper for vax_output_movmemsi.  */

static rtx
mkrtx(enum rtx_code code, enum machine_mode mode, rtx base, HOST_WIDE_INT off)
{
  rtx tmp;

  if (GET_CODE (base) == CONST)
    base = XEXP (base, 0);

  if (GET_CODE (base) == PLUS)
    {
      rtx a = XEXP (base, 0);
      rtx b = XEXP (base, 1);
      if (GET_CODE (b) == CONST)
	b = XEXP (b, 0);
      if (CONST_INT_P (b))
	{
	  /* (plus a const): fold the constant into OFF.  */
          off += INTVAL (b);
          base = a;
	}
      else if (REG_P (a) && GET_CODE (b) == SYMBOL_REF)
	{
	  /* (plus reg sym): push OFF into the symbolic part.  */
	  if (off != 0)
	    {
	      base = gen_rtx_PLUS (Pmode, a, plus_constant(Pmode, b, off));
	      off = 0;
	    }
	}
      else if (REG_P (a) && GET_CODE (b) == PLUS)
	{
	  /* (plus reg (plus sym const)): combine both constants.  */
          off += INTVAL (XEXP (b, 1));
	  base = gen_rtx_PLUS (Pmode, a, plus_constant(Pmode, XEXP (b, 0), off));
	  off = 0;
	}
      else
        {
	  /* Any other address shape is unexpected here.  */
	  debug_rtx(base);
	  gcc_unreachable ();
	}
    }
  if (code == POST_INC)
    tmp = gen_rtx_POST_INC (SImode, base);
  else if (off == 0 || (REG_P (base) && code == REG))
    tmp = base;
  else
    tmp = plus_constant (Pmode, base, off);
  return gen_rtx_MEM (mode, tmp);
}
1736 
/* Output a block copy of INTVAL (operands[2]) bytes from operands[1]
   to operands[0] as a sequence of movq/movl/movw/movb instructions,
   largest pieces first.  When a source or destination address register
   dies in INSN it is reused with post-increment addressing; otherwise
   explicit displacements are generated via mkrtx.  All but the last
   instruction are emitted directly; the last template is returned.  */

const char *
vax_output_movmemsi (rtx insn, rtx *operands)
{
  HOST_WIDE_INT n = INTVAL (operands[2]);
  HOST_WIDE_INT off;
  rtx src, dest;
  const char *pat = NULL;
  const enum rtx_code *src_codes;
  const enum rtx_code *dest_codes;
  int code_idx = 0;
  int mode_idx;

  static const enum machine_mode xmodes[4] =
    {
      QImode, HImode, SImode, DImode
    };
  static const char * const pats[4] =
    {
      "movb %1,%0", "movw %1,%0", "movl %1,%0", "movq %1,%0",
    };
  /* codes[0]: displacement addressing; codes[1]: post-increment for
     all but the final access, which uses plain (reg).  */
  static const enum rtx_code codes[2][3] =
    {
      { PLUS, PLUS, PLUS },
      { POST_INC, POST_INC, REG },
    };

  src = XEXP (operands[1], 0);

  src_codes =
    codes[REG_P (src) && find_regno_note (insn, REG_DEAD, REGNO(src))];

  dest = XEXP (operands[0], 0);

  dest_codes =
    codes[REG_P (dest) && find_regno_note (insn, REG_DEAD, REGNO(dest))];

  /* Walk from DImode down to QImode, emitting as many copies of each
     size as still fit in the remaining byte count N.  */
  for (off = 0, code_idx = 0, mode_idx = 3; mode_idx >= 0; mode_idx--)
    {
      const enum machine_mode mode = xmodes[mode_idx];
      const HOST_WIDE_INT mode_len = GET_MODE_SIZE (mode);
      for (; n >= mode_len; n -= mode_len, off += mode_len)
	{
	  /* Emit the previously prepared instruction; the current one
	     becomes the new pending template.  */
	  if (pat != NULL)
	    output_asm_insn (pat, operands);
	  if (n == mode_len)
	    code_idx = 2;
	  operands[0] = mkrtx(dest_codes[code_idx], mode, dest, off);
	  operands[1] = mkrtx(src_codes[code_idx], mode, src, off);
	  if (pat == NULL)
	    code_idx = 1;
	  pat = pats[mode_idx];
	}
    }

  return pat;
}
1793 
1794 /* True if X is an rtx for a constant that is a valid address.  */
1795 
1796 bool
legitimate_constant_address_p(rtx x)1797 legitimate_constant_address_p (rtx x)
1798 {
1799   if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1800 	  || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1801     return true;
1802   if (GET_CODE (x) != CONST)
1803     return false;
1804 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1805   if (flag_pic
1806       && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1807       && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1808     return false;
1809 #endif
1810    gcc_assert (! REG_P (x));
1811    return true;
1812 }
1813 
1814 bool
legitimate_pic_operand_p(rtx x)1815 legitimate_pic_operand_p (rtx x)
1816 {
1817 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1818   if (GET_CODE (x) != CONST)
1819     return true;
1820   if (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1821       && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1822     return false;
1823 #endif
1824   return true;
1825 }
1826 
1827 /* The other macros defined here are used only in legitimate_address_p ().  */
1828 
/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  (Pseudos are accepted in
   non-strict mode because reload may still assign them hard regs.)  */
#define	INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define	BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1838 
1839 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1840 
1841 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1842    are no SYMBOL_REFs for external symbols present.  */
1843 
static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  /* Under PIC, a non-local symbol's address is not a link-time
     constant, so it may not appear where INDIRECT access is needed.  */
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  /* A CONST is acceptable unless it wraps a non-local symbol in PIC.  */
  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}
1857 
1858 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1859 
static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  /* Without NO_EXTERNAL_INDIRECT_ADDRESS, any constant address may be
     indirected, so this is just CONSTANT_ADDRESS_P.  */
  return CONSTANT_ADDRESS_P (x);
}
1865 
1866 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1867 
1868 /* True if X is an address which can be indirected.  External symbols
1869    could be in a sharable image library, so we disallow those.  */
1870 
1871 static bool
indirectable_address_p(rtx x,bool strict,bool indirect)1872 indirectable_address_p (rtx x, bool strict, bool indirect)
1873 {
1874   if (indirectable_constant_address_p (x, indirect)
1875       || BASE_REGISTER_P (x, strict))
1876     return true;
1877   if (GET_CODE (x) != PLUS
1878       || !BASE_REGISTER_P (XEXP (x, 0), strict)
1879       || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1880     return false;
1881   return indirectable_constant_address_p (XEXP (x, 1), indirect);
1882 }
1883 
1884 /* Return true if x is a valid address not using indexing.
1885    (This much is the easy part.)  */
/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      /* During reload, accept a pseudo unless its memory equivalent
	 would itself need an invalid address.  */
      if (! reload_in_progress
	  || reg_equiv_mem (REGNO (x)) == 0
	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  /* NOTE(review): XEXP is applied before X's code is checked; this
     relies on the unchecked accessor being harmless for whatever codes
     reach this point.  */
  xfoo0 = XEXP (x, 0);
  /* Deferred (indirect) addressing: (mem address).  */
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  /* Auto-decrement/increment on a base register.  */
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}
1909 
1910 /* True if PROD is either a reg times size of mode MODE and MODE is less
1911    than or equal 8 bytes, or just a reg if MODE is one byte.  */
1912 
1913 static bool
index_term_p(rtx prod,machine_mode mode,bool strict)1914 index_term_p (rtx prod, machine_mode mode, bool strict)
1915 {
1916   rtx xfoo0, xfoo1;
1917 
1918   if (GET_MODE_SIZE (mode) == 1)
1919     return BASE_REGISTER_P (prod, strict);
1920 
1921   if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1922     return false;
1923 
1924   xfoo0 = XEXP (prod, 0);
1925   xfoo1 = XEXP (prod, 1);
1926 
1927   if (CONST_INT_P (xfoo0)
1928       && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1929       && INDEX_REGISTER_P (xfoo1, strict))
1930     return true;
1931 
1932   if (CONST_INT_P (xfoo1)
1933       && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1934       && INDEX_REGISTER_P (xfoo0, strict))
1935     return true;
1936 
1937   return false;
1938 }
1939 
1940 /* Return true if X is the sum of a register
1941    and a valid index term for mode MODE.  */
1942 static bool
reg_plus_index_p(rtx x,machine_mode mode,bool strict)1943 reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1944 {
1945   rtx xfoo0, xfoo1;
1946 
1947   if (GET_CODE (x) != PLUS)
1948     return false;
1949 
1950   xfoo0 = XEXP (x, 0);
1951   xfoo1 = XEXP (x, 1);
1952 
1953   if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1954     return true;
1955 
1956   if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1957     return true;
1958 
1959   return false;
1960 }
1961 
1962 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
1963 static bool
indexable_address_p(rtx xfoo0,rtx xfoo1,machine_mode mode,bool strict)1964 indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
1965 {
1966   if (!CONSTANT_ADDRESS_P (xfoo0))
1967     return false;
1968   if (BASE_REGISTER_P (xfoo1, strict))
1969     return !flag_pic || mode == QImode;
1970   if (flag_pic && symbolic_operand (xfoo0, SImode))
1971     return false;
1972   return reg_plus_index_p (xfoo1, mode, strict);
1973 }
1974 
1975 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1976    that is a valid memory address for an instruction.
1977    The MODE argument is the machine mode for the MEM expression
1978    that wants to use this address.  */
1979 bool
vax_legitimate_address_p(machine_mode mode,rtx x,bool strict)1980 vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1981 {
1982   rtx xfoo0, xfoo1;
1983 
1984   if (nonindexed_address_p (x, strict))
1985     return true;
1986 
1987   if (GET_CODE (x) != PLUS)
1988     return false;
1989 
1990   /* Handle <address>[index] represented with index-sum outermost */
1991 
1992   xfoo0 = XEXP (x, 0);
1993   xfoo1 = XEXP (x, 1);
1994 
1995   if (index_term_p (xfoo0, mode, strict)
1996       && nonindexed_address_p (xfoo1, strict))
1997     return true;
1998 
1999   if (index_term_p (xfoo1, mode, strict)
2000       && nonindexed_address_p (xfoo0, strict))
2001     return true;
2002 
2003   /* Handle offset(reg)[index] with offset added outermost */
2004 
2005   if (indexable_address_p (xfoo0, xfoo1, mode, strict)
2006       || indexable_address_p (xfoo1, xfoo0, mode, strict))
2007     return true;
2008 
2009   return false;
2010 }
2011 
2012 /* Return true if x (a legitimate address expression) has an effect that
2013    depends on the machine mode it is used for.  On the VAX, the predecrement
2014    and postincrement address depend thus (the amount of decrement or
2015    increment being the length of the operand) and all indexed address depend
2016    thus (because the index scale factor is the length of the operand).  */
2017 
2018 static bool
vax_mode_dependent_address_p(const_rtx x,addr_space_t as ATTRIBUTE_UNUSED)2019 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
2020 {
2021   rtx xfoo0, xfoo1;
2022 
2023   /* Auto-increment cases are now dealt with generically in recog.c.  */
2024   if (GET_CODE (x) != PLUS)
2025     return false;
2026 
2027   xfoo0 = XEXP (x, 0);
2028   xfoo1 = XEXP (x, 1);
2029 
2030   if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
2031     return false;
2032   if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
2033     return false;
2034   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
2035     return false;
2036   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
2037     return false;
2038 
2039   return true;
2040 }
2041 
2042 static rtx
fixup_mathdi_operand(rtx x,machine_mode mode)2043 fixup_mathdi_operand (rtx x, machine_mode mode)
2044 {
2045   if (illegal_addsub_di_memory_operand (x, mode))
2046     {
2047       rtx addr = XEXP (x, 0);
2048       rtx temp = gen_reg_rtx (Pmode);
2049       rtx offset = 0;
2050 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
2051       if (GET_CODE (addr) == CONST && flag_pic)
2052 	{
2053 	  offset = XEXP (XEXP (addr, 0), 1);
2054 	  addr = XEXP (XEXP (addr, 0), 0);
2055 	}
2056 #endif
2057       emit_move_insn (temp, addr);
2058       if (offset)
2059 	temp = gen_rtx_PLUS (Pmode, temp, offset);
2060       x = gen_rtx_MEM (DImode, temp);
2061     }
2062   return x;
2063 }
2064 
2065 void
vax_expand_addsub_di_operands(rtx * operands,enum rtx_code code)2066 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
2067 {
2068   int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
2069   rtx temp;
2070 
2071   rtx (*gen_old_insn)(rtx, rtx, rtx);
2072   rtx (*gen_si_insn)(rtx, rtx, rtx);
2073   rtx (*gen_insn)(rtx, rtx, rtx);
2074 
2075   if (code == PLUS)
2076     {
2077       gen_old_insn = gen_adddi3_old;
2078       gen_si_insn = gen_addsi3;
2079       gen_insn = gen_adcdi3;
2080     }
2081   else if (code == MINUS)
2082     {
2083       gen_old_insn = gen_subdi3_old;
2084       gen_si_insn = gen_subsi3;
2085       gen_insn = gen_sbcdi3;
2086     }
2087   else
2088     gcc_unreachable ();
2089 
2090   /* If this is addition (thus operands are commutative) and if there is one
2091      addend that duplicates the desination, we want that addend to be the
2092      first addend.  */
2093   if (code == PLUS
2094       && rtx_equal_p (operands[0], operands[2])
2095       && !rtx_equal_p (operands[1], operands[2]))
2096     {
2097       temp = operands[2];
2098       operands[2] = operands[1];
2099       operands[1] = temp;
2100     }
2101 
2102   if (!TARGET_QMATH)
2103     {
2104       emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
2105     }
2106   else if (hi_only)
2107     {
2108       if (!rtx_equal_p (operands[0], operands[1])
2109 	  && (REG_P (operands[0]) && MEM_P (operands[1])))
2110 	{
2111 	  emit_move_insn (operands[0], operands[1]);
2112 	  operands[1] = operands[0];
2113 	}
2114 
2115       operands[0] = fixup_mathdi_operand (operands[0], DImode);
2116       operands[1] = fixup_mathdi_operand (operands[1], DImode);
2117       operands[2] = fixup_mathdi_operand (operands[2], DImode);
2118 
2119       if (!rtx_equal_p (operands[0], operands[1]))
2120 	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
2121 			  operand_subword (operands[1], 0, 0, DImode));
2122 
2123       emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
2124 				 operand_subword (operands[1], 1, 0, DImode),
2125 				 operand_subword (operands[2], 1, 0, DImode)));
2126     }
2127   else
2128     {
2129       /* If are adding the same value together, that's really a multiply by 2,
2130 	 and that's just a left shift of 1.  */
2131       if (rtx_equal_p (operands[1], operands[2]))
2132 	{
2133 	  if (code == MINUS)
2134 	    emit_insn (gen_movdi (operands[0], const0_rtx));
2135 	  else
2136 	    emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
2137 	  return;
2138 	}
2139 
2140       operands[0] = fixup_mathdi_operand (operands[0], DImode);
2141 
2142       /* If an operand is the same as operand[0], use the operand[0] rtx
2143 	 because fixup will an equivalent rtx but not an equal one. */
2144 
2145       if (rtx_equal_p (operands[0], operands[1]))
2146 	operands[1] = operands[0];
2147       else
2148 	operands[1] = fixup_mathdi_operand (operands[1], DImode);
2149 
2150       if (rtx_equal_p (operands[0], operands[2]))
2151 	operands[2] = operands[0];
2152       else
2153 	operands[2] = fixup_mathdi_operand (operands[2], DImode);
2154 
2155       /* If we are subtracting not from ourselves [d = a - b], and because the
2156 	 carry ops are two operand only, we would need to do a move prior to
2157 	 the subtract.  And if d == b, we would need a temp otherwise
2158 	 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
2159 	 into d = -b, d += a.  Since -b can never overflow, even if b == d,
2160 	 no temp is needed.
2161 
2162 	 If we are doing addition, since the carry ops are two operand, if
2163 	 we aren't adding to ourselves, move the first addend to the
2164 	 destination first.  */
2165 
2166       gcc_assert (operands[1] != const0_rtx || code == MINUS);
2167       if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2168 	{
2169 	  if (code == MINUS && CONSTANT_P (operands[1]))
2170 	    {
2171 	      temp = gen_reg_rtx (DImode);
2172 	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2173 	      code = PLUS;
2174 	      gen_insn = gen_adcdi3;
2175 	      operands[2] = operands[1];
2176 	      operands[1] = operands[0];
2177 	    }
2178 	  else
2179 	    emit_move_insn (operands[0], operands[1]);
2180 	}
2181 
2182       /* Subtracting a constant will have been rewritten to an addition of the
2183 	 negative of that constant before we get here.  */
2184       gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2185       emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2186     }
2187 }
2188 
2189 bool
adjacent_operands_p(rtx lo,rtx hi,machine_mode mode)2190 adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
2191 {
2192   HOST_WIDE_INT lo_offset;
2193   HOST_WIDE_INT hi_offset;
2194 
2195   if (GET_CODE (lo) != GET_CODE (hi))
2196     return false;
2197 
2198   if (REG_P (lo))
2199     return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2200   if (CONST_INT_P (lo))
2201     return INTVAL (hi) == 0 && UINTVAL (lo) < 64;
2202   if (CONST_INT_P (lo))
2203     return mode != SImode;
2204 
2205   if (!MEM_P (lo))
2206     return false;
2207 
2208   if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2209     return false;
2210 
2211   lo = XEXP (lo, 0);
2212   hi = XEXP (hi, 0);
2213 
2214   if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2215     return rtx_equal_p (lo, hi);
2216 
2217   switch (GET_CODE (lo))
2218     {
2219     case REG:
2220     case SYMBOL_REF:
2221       lo_offset = 0;
2222       break;
2223     case CONST:
2224       lo = XEXP (lo, 0);
2225       /* FALLTHROUGH */
2226     case PLUS:
2227       if (!CONST_INT_P (XEXP (lo, 1)))
2228 	return false;
2229       lo_offset = INTVAL (XEXP (lo, 1));
2230       lo = XEXP (lo, 0);
2231       break;
2232     default:
2233       return false;
2234     }
2235 
2236   switch (GET_CODE (hi))
2237     {
2238     case REG:
2239     case SYMBOL_REF:
2240       hi_offset = 0;
2241       break;
2242     case CONST:
2243       hi = XEXP (hi, 0);
2244       /* FALLTHROUGH */
2245     case PLUS:
2246       if (!CONST_INT_P (XEXP (hi, 1)))
2247 	return false;
2248       hi_offset = INTVAL (XEXP (hi, 1));
2249       hi = XEXP (hi, 0);
2250       break;
2251     default:
2252       return false;
2253     }
2254 
2255   if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2256     return false;
2257 
2258   return rtx_equal_p (lo, hi)
2259 	 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2260 }
2261 
2262 /* Output assembler code for a block containing the constant parts
2263    of a trampoline, leaving space for the variable parts.  */
2264 
2265 /* On the VAX, the trampoline contains an entry mask and two instructions:
2266      .word NN
     movl $STATIC,r0   (store the function's static chain)
2268      jmp  *$FUNCTION   (jump to function code at address FUNCTION)  */
2269 
static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  /* Placeholder for the entry mask; vax_trampoline_init copies the
     target function's mask here.  */
  assemble_aligned_integer (2, const0_rtx);
  /* The movl-immediate instruction bytes (little endian).  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  /* Placeholder for the static chain value (filled in at init time).  */
  assemble_aligned_integer (4, const0_rtx);
  /* Operand byte selecting STATIC_CHAIN_REGNUM as the destination.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  /* The jmp-absolute instruction bytes (little endian).  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  /* Placeholder for the target function address (filled in at init
     time; see vax_trampoline_init, which stores FNADDR + 2).  */
  assemble_aligned_integer (4, const0_rtx);
}
2280 
2281 /* We copy the register-mask from the function's pure code
2282    to the start of the trampoline.  */
2283 
static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Start from the constant template emitted by
     vax_asm_trampoline_template.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Copy the target function's register mask (its first two bytes) into
     the trampoline's own entry-mask slot at offset 0.  */
  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  /* Store the static chain as the movl immediate at offset 4.  */
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  /* Store the jump target at offset 11; +2 skips the target's entry
     mask, which the trampoline has already processed.  */
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
  /* Flush the instruction stream so the freshly written code is seen.  */
  emit_insn (gen_sync_istream ());
}
2302 
2303 /* Value is the number of bytes of arguments automatically
2304    popped when returning from a subroutine call.
2305    FUNDECL is the declaration node of the function (as a tree),
2306    FUNTYPE is the data type of the function (as a tree),
2307    or for a library call it is an identifier node for the subroutine name.
2308    SIZE is the number of bytes of arguments passed on the stack.
2309 
2310    On the VAX, the RET insn pops a maximum of 255 args for any function.  */
2311 
2312 static poly_int64
vax_return_pops_args(tree fundecl ATTRIBUTE_UNUSED,tree funtype ATTRIBUTE_UNUSED,poly_int64 size)2313 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2314 		      tree funtype ATTRIBUTE_UNUSED, poly_int64 size)
2315 {
2316   return size > 255 * 4 ? 0 : (HOST_WIDE_INT) size;
2317 }
2318 
2319 /* Implement TARGET_FUNCTION_ARG.  On the VAX all args are pushed.  */
2320 
2321 static rtx
vax_function_arg(cumulative_args_t,const function_arg_info &)2322 vax_function_arg (cumulative_args_t, const function_arg_info &)
2323 {
2324   return NULL_RTX;
2325 }
2326 
2327 /* Update the data in CUM to advance over argument ARG.  */
2328 
2329 static void
vax_function_arg_advance(cumulative_args_t cum_v,const function_arg_info & arg)2330 vax_function_arg_advance (cumulative_args_t cum_v,
2331 			  const function_arg_info &arg)
2332 {
2333   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2334 
2335   *cum += (arg.promoted_size_in_bytes () + 3) & ~3;
2336 }
2337 
2338 static HOST_WIDE_INT
vax_starting_frame_offset(void)2339 vax_starting_frame_offset (void)
2340 {
2341   /* On ELF targets, reserve the top of the stack for exception handler
2342      stackadj value.  */
2343   return TARGET_ELF ? -4 : 0;
2344 }
2345 
2346 bool
vax_decomposed_dimode_operand_p(rtx lo,rtx hi)2347 vax_decomposed_dimode_operand_p (rtx lo, rtx hi)
2348 {
2349   HOST_WIDE_INT lo_offset = 0;
2350   HOST_WIDE_INT hi_offset = 0;
2351 
2352   /* If the codes aren't the same, can't be a DImode operand.  */
2353   if (GET_CODE (lo) != GET_CODE (hi))
2354     return false;
2355 
2356   /* If a register, hi regno must be one more than the lo regno.  */
2357   if (REG_P (lo))
2358     return REGNO (lo) + 1 == REGNO (hi);
2359 
2360   /* If not memory, can't be a DImode operand.  */
2361   if (!MEM_P (lo))
2362     return false;
2363 
2364   /* Get addresses of memory operands.  */
2365   lo = XEXP(lo, 0);
2366   hi = XEXP(hi, 0);
2367 
2368   /* If POST_INC, regno must match.  */
2369   if (GET_CODE (lo) == POST_INC && GET_CODE (hi) == POST_INC)
2370     return REGNO (XEXP (lo, 0)) == REGNO (XEXP (hi, 0));
2371 
2372   if (GET_CODE (lo) == PLUS)
2373     {
2374       /* If PLUS or MULT, this must an indexed address so fail.  */
2375       if (GET_CODE (XEXP (lo, 0)) == PLUS
2376 	  || GET_CODE (XEXP (lo, 0)) == MULT
2377 	  || !CONST_INT_P (XEXP (lo, 1)))
2378 	return false;
2379       lo_offset = INTVAL (XEXP (lo, 1));
2380       lo = XEXP(lo, 0);
2381     }
2382 
2383   if (GET_CODE (hi) == PLUS)
2384     {
2385       /* If PLUS or MULT, this must an indexed address so fail.  */
2386       if (GET_CODE (XEXP (hi, 0)) == PLUS
2387 	  || GET_CODE (XEXP (hi, 0)) == MULT
2388 	  || !CONST_INT_P (XEXP (hi, 1)))
2389 	return false;
2390       hi_offset = INTVAL (XEXP (hi, 1));
2391       hi = XEXP(hi, 0);
2392     }
2393 
2394   return rtx_equal_p(lo, hi) && lo_offset + 4 == hi_offset;
2395 }
2396