1 /* Subroutines for insn-output.c for VAX.
2    Copyright (C) 1987-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "df.h"
28 #include "tm_p.h"
29 #include "optabs.h"
30 #include "regs.h"
31 #include "emit-rtl.h"
32 #include "calls.h"
33 #include "varasm.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "expr.h"
37 #include "reload.h"
38 #include "builtins.h"
39 
40 /* This file should be included last.  */
41 #include "target-def.h"
42 
/* Forward declarations of the static worker functions installed into
   the target hook table below.  */
static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
static rtx vax_function_arg (cumulative_args_t, machine_mode,
			     const_tree, bool);
static void vax_function_arg_advance (cumulative_args_t, machine_mode,
				      const_tree, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static int vax_return_pops_args (tree, tree, int);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
62 
/* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides one default hook from target-def.h with the VAX-specific
   value or worker function.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

/* The VAX CALLS frame always needs a frame pointer.  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

/* The global target hook vector, built from the overrides above plus
   the defaults from target-def.h.  */
struct gcc_target targetm = TARGET_INITIALIZER;
118 
/* Set global variables as needed for the options enabled.
   Implements TARGET_OPTION_OVERRIDE.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  With -mg-float,
     DFmode uses the G_floating layout instead of the default D_floating.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  /* Give the OS-specific subtarget (e.g. VMS, NetBSD) a chance to
     adjust options as well.  */
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}
132 
133 static void
vax_add_reg_cfa_offset(rtx insn,int offset,rtx src)134 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
135 {
136   rtx x;
137 
138   x = plus_constant (Pmode, frame_pointer_rtx, offset);
139   x = gen_rtx_MEM (SImode, x);
140   x = gen_rtx_SET (x, src);
141   add_reg_note (insn, REG_CFA_OFFSET, x);
142 }
143 
/* Generate RTL for function entry: emit the procedure entry register
   save mask, attach CFA notes describing the state that the hardware
   CALLS/CALLG instruction saved on the stack, and allocate the local
   stack frame.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;		/* procedure entry mask: bit N set => save rN */
  HOST_WIDE_INT size;
  rtx insn;

  /* Collect the call-saved registers this function actually uses.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is follows:

		<- CFA, AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  /* Describe the caller state the CALLS instruction stored, at fixed
     offsets from the frame pointer.  */
  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  /* The MASK registers follow, one 4-byte slot each.  */
  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
		plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= STARTING_FRAME_OFFSET;
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
215 
/* Implements TARGET_ASM_FILE_START.  When debugging with stabs, we
   want to output an extra dummy label so that gas can distinguish
   between D_float and G_float prior to processing the .stabs directive
   identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();

  /* The label name encodes the active double format ('d' or 'g').  */
  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}
227 
228 /* We can use the BSD C library routines for the libgcc calls that are
229    still generated, since that's what they boil down to anyways.  When
230    ELF, avoid the user's namespace.  */
231 
232 static void
vax_init_libfuncs(void)233 vax_init_libfuncs (void)
234 {
235   if (TARGET_BSD_DIVMOD)
236     {
237       set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
238       set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
239     }
240 }
241 
/* Split the DImode (quadword) OPERANDS of an N-operand CODE insn into
   SImode word halves: LOW[i] receives rtl accessing the low word of
   operand I, and OPERANDS[i] is rewritten in place to access the high
   word.  Autoincrement/autodecrement addresses are shared between the
   two halves; the "-Os dying address register" case reuses the address
   register as a post-increment to avoid a displacement.  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  /* Both halves use the same side-effect address; the hardware
	     steps the register between the two SImode accesses.  */
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  /* The address register dies in this insn, so it is free to
	     step it: read the low word with (reg)+, leaving (reg)
	     pointing at the high word -- no 4-byte displacement.  */
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  /* Plain case: subword 0 is the low word on the
	     little-endian VAX, subword 1 the high word.  */
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
280 
281 void
print_operand_address(FILE * file,rtx addr)282 print_operand_address (FILE * file, rtx addr)
283 {
284   rtx orig = addr;
285   rtx reg1, breg, ireg;
286   rtx offset;
287 
288  retry:
289   switch (GET_CODE (addr))
290     {
291     case MEM:
292       fprintf (file, "*");
293       addr = XEXP (addr, 0);
294       goto retry;
295 
296     case REG:
297       fprintf (file, "(%s)", reg_names[REGNO (addr)]);
298       break;
299 
300     case PRE_DEC:
301       fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
302       break;
303 
304     case POST_INC:
305       fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
306       break;
307 
308     case PLUS:
309       /* There can be either two or three things added here.  One must be a
310 	 REG.  One can be either a REG or a MULT of a REG and an appropriate
311 	 constant, and the third can only be a constant or a MEM.
312 
313 	 We get these two or three things and put the constant or MEM in
314 	 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
315 	 a register and can't tell yet if it is a base or index register,
316 	 put it into REG1.  */
317 
318       reg1 = 0; ireg = 0; breg = 0; offset = 0;
319 
320       if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
321 	  || MEM_P (XEXP (addr, 0)))
322 	{
323 	  offset = XEXP (addr, 0);
324 	  addr = XEXP (addr, 1);
325 	}
326       else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
327 	       || MEM_P (XEXP (addr, 1)))
328 	{
329 	  offset = XEXP (addr, 1);
330 	  addr = XEXP (addr, 0);
331 	}
332       else if (GET_CODE (XEXP (addr, 1)) == MULT)
333 	{
334 	  ireg = XEXP (addr, 1);
335 	  addr = XEXP (addr, 0);
336 	}
337       else if (GET_CODE (XEXP (addr, 0)) == MULT)
338 	{
339 	  ireg = XEXP (addr, 0);
340 	  addr = XEXP (addr, 1);
341 	}
342       else if (REG_P (XEXP (addr, 1)))
343 	{
344 	  reg1 = XEXP (addr, 1);
345 	  addr = XEXP (addr, 0);
346 	}
347       else if (REG_P (XEXP (addr, 0)))
348 	{
349 	  reg1 = XEXP (addr, 0);
350 	  addr = XEXP (addr, 1);
351 	}
352       else
353 	gcc_unreachable ();
354 
355       if (REG_P (addr))
356 	{
357 	  if (reg1)
358 	    ireg = addr;
359 	  else
360 	    reg1 = addr;
361 	}
362       else if (GET_CODE (addr) == MULT)
363 	ireg = addr;
364       else
365 	{
366 	  gcc_assert (GET_CODE (addr) == PLUS);
367 	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
368 	      || MEM_P (XEXP (addr, 0)))
369 	    {
370 	      if (offset)
371 		{
372 		  if (CONST_INT_P (offset))
373 		    offset = plus_constant (Pmode, XEXP (addr, 0),
374 					    INTVAL (offset));
375 		  else
376 		    {
377 		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
378 		      offset = plus_constant (Pmode, offset,
379 					      INTVAL (XEXP (addr, 0)));
380 		    }
381 		}
382 	      offset = XEXP (addr, 0);
383 	    }
384 	  else if (REG_P (XEXP (addr, 0)))
385 	    {
386 	      if (reg1)
387 		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
388 	      else
389 		reg1 = XEXP (addr, 0);
390 	    }
391 	  else
392 	    {
393 	      gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
394 	      gcc_assert (!ireg);
395 	      ireg = XEXP (addr, 0);
396 	    }
397 
398 	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
399 	      || MEM_P (XEXP (addr, 1)))
400 	    {
401 	      if (offset)
402 		{
403 		  if (CONST_INT_P (offset))
404 		    offset = plus_constant (Pmode, XEXP (addr, 1),
405 					    INTVAL (offset));
406 		  else
407 		    {
408 		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
409 		      offset = plus_constant (Pmode, offset,
410 					      INTVAL (XEXP (addr, 1)));
411 		    }
412 		}
413 	      offset = XEXP (addr, 1);
414 	    }
415 	  else if (REG_P (XEXP (addr, 1)))
416 	    {
417 	      if (reg1)
418 		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
419 	      else
420 		reg1 = XEXP (addr, 1);
421 	    }
422 	  else
423 	    {
424 	      gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
425 	      gcc_assert (!ireg);
426 	      ireg = XEXP (addr, 1);
427 	    }
428 	}
429 
430       /* If REG1 is nonzero, figure out if it is a base or index register.  */
431       if (reg1)
432 	{
433 	  if (breg
434 	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
435 	      || (offset
436 		  && (MEM_P (offset)
437 		      || (flag_pic && symbolic_operand (offset, SImode)))))
438 	    {
439 	      gcc_assert (!ireg);
440 	      ireg = reg1;
441 	    }
442 	  else
443 	    breg = reg1;
444 	}
445 
446       if (offset != 0)
447 	{
448 	  if (flag_pic && symbolic_operand (offset, SImode))
449 	    {
450 	      if (breg && ireg)
451 		{
452 		  debug_rtx (orig);
453 		  output_operand_lossage ("symbol used with both base and indexed registers");
454 		}
455 
456 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
457 	      if (flag_pic > 1 && GET_CODE (offset) == CONST
458 		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
459 		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
460 		{
461 		  debug_rtx (orig);
462 		  output_operand_lossage ("symbol with offset used in PIC mode");
463 		}
464 #endif
465 
466 	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
467 	      if (breg)
468 		{
469 		  ireg = breg;
470 		  breg = 0;
471 		}
472 
473 	    }
474 
475 	  output_address (VOIDmode, offset);
476 	}
477 
478       if (breg != 0)
479 	fprintf (file, "(%s)", reg_names[REGNO (breg)]);
480 
481       if (ireg != 0)
482 	{
483 	  if (GET_CODE (ireg) == MULT)
484 	    ireg = XEXP (ireg, 0);
485 	  gcc_assert (REG_P (ireg));
486 	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
487 	}
488       break;
489 
490     default:
491       output_addr_const (file, addr);
492     }
493 }
494 
/* Output to FILE the VAX assembler syntax for operand X, as modified
   by the operand code CODE:
     '#'  the D/G-float suffix letter (ASM_DOUBLE_CHAR)
     '|'  the register prefix
     'c'  condition mnemonic for comparison X; 'C' the reversed one
     'D'  negative CONST_INT printed as unsigned hex
     'P'  CONST_INT plus one
     'N'  one's complement of CONST_INT
     'R'  32 minus CONST_INT (rotl cannot take a negative count)
     'H'  low 16 bits of ~CONST_INT; 'h' -CONST_INT as a 16-bit value
     'B'  low 8 bits of ~CONST_INT;  'b' low 8 bits of -CONST_INT
     'M'  mask with the low CONST_INT bits cleared
     'x'  CONST_INT in hex
   With no (matching) code: a register name, a memory address, an
   F/D/G floating literal, or a '$'-prefixed immediate.  */

void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (GET_MODE (x), XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* F-float literal: printed as $0f<decimal>.  */
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* D- or G-float literal, depending on the configured format.  */
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
556 
557 const char *
cond_name(rtx op)558 cond_name (rtx op)
559 {
560   switch (GET_CODE (op))
561     {
562     case NE:
563       return "neq";
564     case EQ:
565       return "eql";
566     case GE:
567       return "geq";
568     case GT:
569       return "gtr";
570     case LE:
571       return "leq";
572     case LT:
573       return "lss";
574     case GEU:
575       return "gequ";
576     case GTU:
577       return "gtru";
578     case LEU:
579       return "lequ";
580     case LTU:
581       return "lssu";
582 
583     default:
584       gcc_unreachable ();
585     }
586 }
587 
588 const char *
rev_cond_name(rtx op)589 rev_cond_name (rtx op)
590 {
591   switch (GET_CODE (op))
592     {
593     case EQ:
594       return "neq";
595     case NE:
596       return "eql";
597     case LT:
598       return "geq";
599     case LE:
600       return "gtr";
601     case GT:
602       return "leq";
603     case GE:
604       return "lss";
605     case LTU:
606       return "gequ";
607     case LEU:
608       return "gtru";
609     case GTU:
610       return "lequ";
611     case GEU:
612       return "lssu";
613 
614     default:
615       gcc_unreachable ();
616     }
617 }
618 
/* Return true if C is a CONST_DOUBLE equal to 0.0, 1.0 or 2.0, to a
   power of two from 1 to 64, or to the exact reciprocal of such a
   power.  The cost code treats these as cheap float literals
   (presumably the values encodable as VAX short-literal operands --
   see the uses in vax_rtx_costs).  */

static bool
vax_float_literal (rtx c)
{
  machine_mode mode;
  const REAL_VALUE_TYPE *r;
  REAL_VALUE_TYPE s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  /* The shared CONST0/CONST1/CONST2 rtxes can be matched by pointer.  */
  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  r = CONST_DOUBLE_REAL_VALUE (c);

  /* Check 2^0 .. 2^6 and their exact reciprocals.  */
  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      real_from_integer (&s, mode, x, SIGNED);

      if (real_equal (r, &s))
	return true;
      /* Powers of two always have an exact inverse.  */
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (real_equal (r, &s))
	return true;
    }
  return false;
}
654 
655 
/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

   1 - symbolic address
   1 - pre-decrement
   1 - indexing and/or offset(register)
   2 - indirect */


static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU -- a pre-decrement address also uses a register.  */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      /* NOTE(review): the comparison uses > 256, so INTVAL 128
	 (128+128 == 256) is also treated as a free byte offset -- one
	 past the signed-byte range.  Confirm the boundary is intended.  */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      /* Stash one operand and keep scanning the other; up to two
	 pending operands are revisited below via `restart'.  */
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
734 
735 static int
vax_address_cost(rtx x,machine_mode mode ATTRIBUTE_UNUSED,addr_space_t as ATTRIBUTE_UNUSED,bool speed ATTRIBUTE_UNUSED)736 vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
737 		  addr_space_t as ATTRIBUTE_UNUSED,
738 		  bool speed ATTRIBUTE_UNUSED)
739 {
740   return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
741 }
742 
/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.  Implements TARGET_RTX_COSTS: sets *TOTAL to the cost
   of X and returns true when no deeper recursion by the caller is
   wanted.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
	       int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = 0;
	  return true;
	}
      if (outer_code == AND)
	{
	  /* AND complements its constant operand, so the cheap range is
	     the one whose complement fits the 6-bit literal.  */
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = 1;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	*total = vax_float_literal (x) ? 5 : 8;
      else
	/* Integer CONST_DOUBLE: cheap when the value (or, for PLUS, its
	   negation) fits the 6-bit literal range.  */
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
		 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case DFmode:
	  *total = 16;		/* 4 on VAX 9000 */
	  break;
	case SFmode:
	  *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case DImode:
	  *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case SImode:
	case HImode:
	case QImode:
	  *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
	*total = 30;		/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = 24;
      else
	*total = 11;		/* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 29;
      break;

    case FLOAT:
      *total = (6		/* 4 on VAX 9000 */
		+ (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;		/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = 12;
      else
	*total = 10;		/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e"; 		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = 4;
	  /* Skip the constant operand in the operand walk below.  */
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = 9;
      else if (mode == SFmode)
	*total = 6;
      else if (mode == DImode)
	*total = 4;
      else
	*total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = 5;		/* 7 on VAX 2 */
      else
	*total = 3;		/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;		/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && mode != QImode)
	    *total += 1;	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += 1;		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += 1;
	      else
		*total += (GET_MODE (x) == DFmode) ? 3 : 2;
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += 2;
	    }
	  break;
	case MEM:
	  *total += 1;		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += vax_address_cost_1 (XEXP (op, 0));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += 1;
	  break;
	}
    }
  return true;
}
1024 
/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
	addl2	$DELTA, 4(ap)	#adjust first argument
	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
*/

static void
vax_output_mi_thunk (FILE * file,
		     tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  /* 0x0ffc is the conservative entry mask from the comment above
     (bits 2-11 set: save r2-r11).  */
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  /* The first (this) argument lives at 4(ap).  */
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
  /* +2 skips FUNCTION's own 2-byte entry mask word.  */
  fprintf (file, "+2\n");
}
1045 
1046 static rtx
vax_struct_value_rtx(tree fntype ATTRIBUTE_UNUSED,int incoming ATTRIBUTE_UNUSED)1047 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1048 		      int incoming ATTRIBUTE_UNUSED)
1049 {
1050   return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
1051 }
1052 
/* Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE: record the hard frame
   pointer as the frame value saved by __builtin_setjmp.  */

static rtx
vax_builtin_setjmp_frame_value (void)
{
  return hard_frame_pointer_rtx;
}
1058 
/* Worker function for NOTICE_UPDATE_CC.  Record in cc_status what EXP,
   the body of the insn just output, leaves in the condition codes, so
   that a following compare against zero can be elided.  */

void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      if (GET_CODE (SET_SRC (exp)) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
	       && GET_CODE (SET_DEST (exp)) != PC)
	{
	  cc_status.flags = 0;
	  /* The integer operations below don't set carry or
	     set it in an incompatible way.  That's ok though
	     as the Z bit is all we need when doing unsigned
	     comparisons on the result of these insns (since
	     they're always with 0).  Set CC_NO_OVERFLOW to
	     generate the correct unsigned branches.  */
	  switch (GET_CODE (SET_SRC (exp)))
	    {
	    case NEG:
	      if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
		break;
	      /* FALLTHRU */
	    case AND:
	    case IOR:
	    case XOR:
	    case NOT:
	    case MEM:
	    case REG:
	      cc_status.flags = CC_NO_OVERFLOW;
	      break;
	    default:
	      break;
	    }
	  cc_status.value1 = SET_DEST (exp);
	  cc_status.value2 = SET_SRC (exp);
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
	{
	  cc_status.flags = 0;
	  cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
	  cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
	}
      else
	/* PARALLELs whose first element sets the PC are aob,
	   sob insns.  They do change the cc's.  */
	CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  /* If the source mentions the register just set, the recorded source
     value is stale; keep only the destination.  */
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  /* Likewise be conservative when both recorded values are memory.  */
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}
1127 
1128 /* Output integer move instructions.  */
1129 
1130 const char *
vax_output_int_move(rtx insn ATTRIBUTE_UNUSED,rtx * operands,machine_mode mode)1131 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1132 		     machine_mode mode)
1133 {
1134   rtx hi[3], lo[3];
1135   const char *pattern_hi, *pattern_lo;
1136 
1137   switch (mode)
1138     {
1139     case DImode:
1140       if (operands[1] == const0_rtx)
1141 	return "clrq %0";
1142       if (TARGET_QMATH && optimize_size
1143 	  && (CONST_INT_P (operands[1])
1144 	      || GET_CODE (operands[1]) == CONST_DOUBLE))
1145 	{
1146 	  unsigned HOST_WIDE_INT hval, lval;
1147 	  int n;
1148 
1149 	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
1150 	    {
1151 	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1152 
1153 	      /* Make sure only the low 32 bits are valid.  */
1154 	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1155 	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1156 	    }
1157 	  else
1158 	    {
1159 	      lval = INTVAL (operands[1]);
1160 	      hval = 0;
1161 	    }
1162 
1163 	  /* Here we see if we are trying to see if the 64bit value is really
1164 	     a 6bit shifted some arbitrary amount.  If so, we can use ashq to
1165 	     shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1166 	     8 bytes - 1 shift byte - 1 short literal byte.  */
1167 	  if (lval != 0
1168 	      && (n = exact_log2 (lval & (- lval))) != -1
1169 	      && (lval >> n) < 64)
1170 	    {
1171 	      lval >>= n;
1172 
1173 	      /* On 32bit platforms, if the 6bits didn't overflow into the
1174 		 upper 32bit value that value better be 0.  If we have
1175 		 overflowed, make sure it wasn't too much.  */
1176 	      if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1177 		{
1178 		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1179 		    n = 0;	/* failure */
1180 		  else
1181 		    lval |= hval << (32 - n);
1182 		}
1183 	      /*  If n is 0, then ashq is not the best way to emit this.  */
1184 	      if (n > 0)
1185 		{
1186 		  operands[1] = GEN_INT (lval);
1187 		  operands[2] = GEN_INT (n);
1188 		  return "ashq %2,%D1,%0";
1189 		}
1190 #if HOST_BITS_PER_WIDE_INT == 32
1191 	    }
1192 	  /* On 32bit platforms, if the low 32bit value is 0, checkout the
1193 	     upper 32bit value.  */
1194 	  else if (hval != 0
1195 		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
1196 		   && (hval >> n) < 64)
1197 	    {
1198 	      operands[1] = GEN_INT (hval >> n);
1199 	      operands[2] = GEN_INT (n + 32);
1200 	      return "ashq %2,%D1,%0";
1201 #endif
1202 	    }
1203 	}
1204 
1205       if (TARGET_QMATH
1206 	  && (!MEM_P (operands[0])
1207 	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1208 	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1209 	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
1210 	  && ((CONST_INT_P (operands[1])
1211 	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1212 	      || GET_CODE (operands[1]) == CONST_DOUBLE))
1213 	{
1214 	  hi[0] = operands[0];
1215 	  hi[1] = operands[1];
1216 
1217 	  split_quadword_operands (insn, SET, hi, lo, 2);
1218 
1219 	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
1220 	  pattern_hi = vax_output_int_move (NULL, hi, SImode);
1221 
1222 	  /* The patterns are just movl/movl or pushl/pushl then a movq will
1223 	     be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1224 	     bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1225 	     value bytes.  */
1226 	  if ((!strncmp (pattern_lo, "movl", 4)
1227 	      && !strncmp (pattern_hi, "movl", 4))
1228 	      || (!strncmp (pattern_lo, "pushl", 5)
1229 		  && !strncmp (pattern_hi, "pushl", 5)))
1230 	    return "movq %1,%0";
1231 
1232 	  if (MEM_P (operands[0])
1233 	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1234 	    {
1235 	      output_asm_insn (pattern_hi, hi);
1236 	      operands[0] = lo[0];
1237 	      operands[1] = lo[1];
1238 	      operands[2] = lo[2];
1239 	      return pattern_lo;
1240 	    }
1241 	  else
1242 	    {
1243 	      output_asm_insn (pattern_lo, lo);
1244 	      operands[0] = hi[0];
1245 	      operands[1] = hi[1];
1246 	      operands[2] = hi[2];
1247 	      return pattern_hi;
1248 	    }
1249 	}
1250       return "movq %1,%0";
1251 
1252     case SImode:
1253       if (symbolic_operand (operands[1], SImode))
1254 	{
1255 	  if (push_operand (operands[0], SImode))
1256 	    return "pushab %a1";
1257 	  return "movab %a1,%0";
1258 	}
1259 
1260       if (operands[1] == const0_rtx)
1261 	{
1262 	  if (push_operand (operands[1], SImode))
1263 	    return "pushl %1";
1264 	  return "clrl %0";
1265 	}
1266 
1267       if (CONST_INT_P (operands[1])
1268 	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1269 	{
1270 	  HOST_WIDE_INT i = INTVAL (operands[1]);
1271 	  int n;
1272 	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
1273 	    return "mcoml %N1,%0";
1274 	  if ((unsigned HOST_WIDE_INT)i < 0x100)
1275 	    return "movzbl %1,%0";
1276 	  if (i >= -0x80 && i < 0)
1277 	    return "cvtbl %1,%0";
1278 	  if (optimize_size
1279 	      && (n = exact_log2 (i & (-i))) != -1
1280 	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1281 	    {
1282 	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1283 	      operands[2] = GEN_INT (n);
1284 	      return "ashl %2,%1,%0";
1285 	    }
1286 	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
1287 	    return "movzwl %1,%0";
1288 	  if (i >= -0x8000 && i < 0)
1289 	    return "cvtwl %1,%0";
1290 	}
1291       if (push_operand (operands[0], SImode))
1292 	return "pushl %1";
1293       return "movl %1,%0";
1294 
1295     case HImode:
1296       if (CONST_INT_P (operands[1]))
1297 	{
1298 	  HOST_WIDE_INT i = INTVAL (operands[1]);
1299 	  if (i == 0)
1300 	    return "clrw %0";
1301 	  else if ((unsigned HOST_WIDE_INT)i < 64)
1302 	    return "movw %1,%0";
1303 	  else if ((unsigned HOST_WIDE_INT)~i < 64)
1304 	    return "mcomw %H1,%0";
1305 	  else if ((unsigned HOST_WIDE_INT)i < 256)
1306 	    return "movzbw %1,%0";
1307 	  else if (i >= -0x80 && i < 0)
1308 	    return "cvtbw %1,%0";
1309 	}
1310       return "movw %1,%0";
1311 
1312     case QImode:
1313       if (CONST_INT_P (operands[1]))
1314 	{
1315 	  HOST_WIDE_INT i = INTVAL (operands[1]);
1316 	  if (i == 0)
1317 	    return "clrb %0";
1318 	  else if ((unsigned HOST_WIDE_INT)~i < 64)
1319 	    return "mcomb %B1,%0";
1320 	}
1321       return "movb %1,%0";
1322 
1323     default:
1324       gcc_unreachable ();
1325     }
1326 }
1327 
1328 /* Output integer add instructions.
1329 
1330    The space-time-opcode tradeoffs for addition vary by model of VAX.
1331 
1332    On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.
1334 
1335    "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1336    faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1337    a register is used in an address too soon after it is set.
1338    Compromise by using movab only when it is shorter than the add
1339    or the base register in the address is one of sp, ap, and fp,
1340    which are not modified very often.  */
1341 
1342 const char *
vax_output_int_add(rtx insn,rtx * operands,machine_mode mode)1343 vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
1344 {
1345   switch (mode)
1346     {
1347     case DImode:
1348       {
1349 	rtx low[3];
1350 	const char *pattern;
1351 	int carry = 1;
1352 	bool sub;
1353 
1354 	if (TARGET_QMATH && 0)
1355 	  debug_rtx (insn);
1356 
1357 	split_quadword_operands (insn, PLUS, operands, low, 3);
1358 
1359 	if (TARGET_QMATH)
1360 	  {
1361 	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
1362 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1363 	    gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1364 	    gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1365 #endif
1366 
1367 	    /* No reason to add a 0 to the low part and thus no carry, so just
1368 	       emit the appropriate add/sub instruction.  */
1369 	    if (low[2] == const0_rtx)
1370 	      return vax_output_int_add (NULL, operands, SImode);
1371 
1372 	    /* Are we doing addition or subtraction?  */
1373 	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1374 
1375 	    /* We can't use vax_output_int_add since some the patterns don't
1376 	       modify the carry bit.  */
1377 	    if (sub)
1378 	      {
1379 		if (low[2] == constm1_rtx)
1380 		  pattern = "decl %0";
1381 		else
1382 		  pattern = "subl2 $%n2,%0";
1383 	      }
1384 	    else
1385 	      {
1386 		if (low[2] == const1_rtx)
1387 		  pattern = "incl %0";
1388 		else
1389 		  pattern = "addl2 %2,%0";
1390 	      }
1391 	    output_asm_insn (pattern, low);
1392 
1393 	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
1394 	       two 32bit parts, we complement each and then add one to
1395 	       low part.  We know that the low part can't overflow since
1396 	       it's value can never be 0.  */
1397 	    if (sub)
1398 		return "sbwc %N2,%0";
1399 	    return "adwc %2,%0";
1400 	  }
1401 
1402 	/* Add low parts.  */
1403 	if (rtx_equal_p (operands[0], operands[1]))
1404 	  {
1405 	    if (low[2] == const0_rtx)
1406 	/* Should examine operand, punt if not POST_INC.  */
1407 	      pattern = "tstl %0", carry = 0;
1408 	    else if (low[2] == const1_rtx)
1409 	      pattern = "incl %0";
1410 	    else
1411 	      pattern = "addl2 %2,%0";
1412 	  }
1413 	else
1414 	  {
1415 	    if (low[2] == const0_rtx)
1416 	      pattern = "movl %1,%0", carry = 0;
1417 	    else
1418 	      pattern = "addl3 %2,%1,%0";
1419 	  }
1420 	if (pattern)
1421 	  output_asm_insn (pattern, low);
1422 	if (!carry)
1423 	  /* If CARRY is 0, we don't have any carry value to worry about.  */
1424 	  return get_insn_template (CODE_FOR_addsi3, insn);
1425 	/* %0 = C + %1 + %2 */
1426 	if (!rtx_equal_p (operands[0], operands[1]))
1427 	  output_asm_insn ((operands[1] == const0_rtx
1428 			    ? "clrl %0"
1429 			    : "movl %1,%0"), operands);
1430 	return "adwc %2,%0";
1431       }
1432 
1433     case SImode:
1434       if (rtx_equal_p (operands[0], operands[1]))
1435 	{
1436 	  if (operands[2] == const1_rtx)
1437 	    return "incl %0";
1438 	  if (operands[2] == constm1_rtx)
1439 	    return "decl %0";
1440 	  if (CONST_INT_P (operands[2])
1441 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1442 	    return "subl2 $%n2,%0";
1443 	  if (CONST_INT_P (operands[2])
1444 	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1445 	      && REG_P (operands[1])
1446 	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1447 		   || REGNO (operands[1]) > 11))
1448 	    return "movab %c2(%1),%0";
1449 	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1450 	    return "movab %a2[%0],%0";
1451 	  return "addl2 %2,%0";
1452 	}
1453 
1454       if (rtx_equal_p (operands[0], operands[2]))
1455 	{
1456 	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1457 	    return "movab %a1[%0],%0";
1458 	  return "addl2 %1,%0";
1459 	}
1460 
1461       if (CONST_INT_P (operands[2])
1462 	  && INTVAL (operands[2]) < 32767
1463 	  && INTVAL (operands[2]) > -32768
1464 	  && REG_P (operands[1])
1465 	  && push_operand (operands[0], SImode))
1466 	return "pushab %c2(%1)";
1467 
1468       if (CONST_INT_P (operands[2])
1469 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1470 	return "subl3 $%n2,%1,%0";
1471 
1472       if (CONST_INT_P (operands[2])
1473 	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1474 	  && REG_P (operands[1])
1475 	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1476 	       || REGNO (operands[1]) > 11))
1477 	return "movab %c2(%1),%0";
1478 
1479       /* Add this if using gcc on a VAX 3xxx:
1480       if (REG_P (operands[1]) && REG_P (operands[2]))
1481 	return "movab (%1)[%2],%0";
1482       */
1483 
1484       if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1485 	{
1486 	  if (push_operand (operands[0], SImode))
1487 	    return "pushab %a2[%1]";
1488 	  return "movab %a2[%1],%0";
1489 	}
1490 
1491       if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1492 	{
1493 	  if (push_operand (operands[0], SImode))
1494 	    return "pushab %a1[%2]";
1495 	  return "movab %a1[%2],%0";
1496 	}
1497 
1498       if (flag_pic && REG_P (operands[0])
1499 	  && symbolic_operand (operands[2], SImode))
1500 	return "movab %a2,%0;addl2 %1,%0";
1501 
1502       if (flag_pic
1503 	  && (symbolic_operand (operands[1], SImode)
1504 	      || symbolic_operand (operands[1], SImode)))
1505 	debug_rtx (insn);
1506 
1507       return "addl3 %1,%2,%0";
1508 
1509     case HImode:
1510       if (rtx_equal_p (operands[0], operands[1]))
1511 	{
1512 	  if (operands[2] == const1_rtx)
1513 	    return "incw %0";
1514 	  if (operands[2] == constm1_rtx)
1515 	    return "decw %0";
1516 	  if (CONST_INT_P (operands[2])
1517 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1518 	    return "subw2 $%n2,%0";
1519 	  return "addw2 %2,%0";
1520 	}
1521       if (rtx_equal_p (operands[0], operands[2]))
1522 	return "addw2 %1,%0";
1523       if (CONST_INT_P (operands[2])
1524 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1525 	return "subw3 $%n2,%1,%0";
1526       return "addw3 %1,%2,%0";
1527 
1528     case QImode:
1529       if (rtx_equal_p (operands[0], operands[1]))
1530 	{
1531 	  if (operands[2] == const1_rtx)
1532 	    return "incb %0";
1533 	  if (operands[2] == constm1_rtx)
1534 	    return "decb %0";
1535 	  if (CONST_INT_P (operands[2])
1536 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1537 	    return "subb2 $%n2,%0";
1538 	  return "addb2 %2,%0";
1539 	}
1540       if (rtx_equal_p (operands[0], operands[2]))
1541 	return "addb2 %1,%0";
1542       if (CONST_INT_P (operands[2])
1543 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1544 	return "subb3 $%n2,%1,%0";
1545       return "addb3 %1,%2,%0";
1546 
1547     default:
1548       gcc_unreachable ();
1549     }
1550 }
1551 
/* Output integer subtract instructions.  Only DImode is handled here;
   narrower subtracts come straight from their insn templates.  */

const char *
vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;	/* Nonzero if the low-part insn sets the carry.  */

	/* Debugging aid; intentionally disabled.  */
	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;	/* Nothing to subtract, no borrow.  */
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %1 - %2 - C (sbwc subtracts its first operand and the
	       borrow from the destination).  */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
  }
}
1626 
1627 /* True if X is an rtx for a constant that is a valid address.  */
1628 
1629 bool
legitimate_constant_address_p(rtx x)1630 legitimate_constant_address_p (rtx x)
1631 {
1632   if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1633 	  || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1634     return true;
1635   if (GET_CODE (x) != CONST)
1636     return false;
1637 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1638   if (flag_pic
1639       && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1640       && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1641     return false;
1642 #endif
1643    return true;
1644 }
1645 
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg (pseudos are always
   accepted when !STRICT).  */
#define	INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg (pseudos are always
   accepted when !STRICT).  */
#define	BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1657 
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  /* A symbol is fine unless it is non-local under PIC and would be
     reached through an indirection.  */
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  /* For a CONST wrapper, apply the same locality rule to the inner
     symbol (if the inner operand is a symbol at all).  */
  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* Without NO_EXTERNAL_INDIRECT_ADDRESS any constant address will do.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1686 
1687 /* True if X is an address which can be indirected.  External symbols
1688    could be in a sharable image library, so we disallow those.  */
1689 
1690 static bool
indirectable_address_p(rtx x,bool strict,bool indirect)1691 indirectable_address_p (rtx x, bool strict, bool indirect)
1692 {
1693   if (indirectable_constant_address_p (x, indirect)
1694       || BASE_REGISTER_P (x, strict))
1695     return true;
1696   if (GET_CODE (x) != PLUS
1697       || !BASE_REGISTER_P (XEXP (x, 0), strict)
1698       || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1699     return false;
1700   return indirectable_constant_address_p (XEXP (x, 1), indirect);
1701 }
1702 
/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      /* During reload a pseudo may stand for a stack slot; accept it
	 only if that memory location is itself indirectable.  */
      if (! reload_in_progress
	  || reg_equiv_mem (REGNO (x)) == 0
	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  /* NOTE(review): X may still be a REG here (when the reload check above
     fell through), in which case XEXP (x, 0) is not a meaningful operand;
     it is only consumed under the MEM / PRE_DEC / POST_INC guards below,
     so this appears harmless — confirm under RTL checking.  */
  xfoo0 = XEXP (x, 0);
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}
1728 
1729 /* True if PROD is either a reg times size of mode MODE and MODE is less
1730    than or equal 8 bytes, or just a reg if MODE is one byte.  */
1731 
1732 static bool
index_term_p(rtx prod,machine_mode mode,bool strict)1733 index_term_p (rtx prod, machine_mode mode, bool strict)
1734 {
1735   rtx xfoo0, xfoo1;
1736 
1737   if (GET_MODE_SIZE (mode) == 1)
1738     return BASE_REGISTER_P (prod, strict);
1739 
1740   if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1741     return false;
1742 
1743   xfoo0 = XEXP (prod, 0);
1744   xfoo1 = XEXP (prod, 1);
1745 
1746   if (CONST_INT_P (xfoo0)
1747       && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1748       && INDEX_REGISTER_P (xfoo1, strict))
1749     return true;
1750 
1751   if (CONST_INT_P (xfoo1)
1752       && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1753       && INDEX_REGISTER_P (xfoo0, strict))
1754     return true;
1755 
1756   return false;
1757 }
1758 
1759 /* Return true if X is the sum of a register
1760    and a valid index term for mode MODE.  */
1761 static bool
reg_plus_index_p(rtx x,machine_mode mode,bool strict)1762 reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1763 {
1764   rtx xfoo0, xfoo1;
1765 
1766   if (GET_CODE (x) != PLUS)
1767     return false;
1768 
1769   xfoo0 = XEXP (x, 0);
1770   xfoo1 = XEXP (x, 1);
1771 
1772   if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1773     return true;
1774 
1775   if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1776     return true;
1777 
1778   return false;
1779 }
1780 
1781 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
1782 static bool
indexable_address_p(rtx xfoo0,rtx xfoo1,machine_mode mode,bool strict)1783 indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
1784 {
1785   if (!CONSTANT_ADDRESS_P (xfoo0))
1786     return false;
1787   if (BASE_REGISTER_P (xfoo1, strict))
1788     return !flag_pic || mode == QImode;
1789   if (flag_pic && symbolic_operand (xfoo0, SImode))
1790     return false;
1791   return reg_plus_index_p (xfoo1, mode, strict);
1792 }
1793 
1794 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1795    that is a valid memory address for an instruction.
1796    The MODE argument is the machine mode for the MEM expression
1797    that wants to use this address.  */
1798 bool
vax_legitimate_address_p(machine_mode mode,rtx x,bool strict)1799 vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1800 {
1801   rtx xfoo0, xfoo1;
1802 
1803   if (nonindexed_address_p (x, strict))
1804     return true;
1805 
1806   if (GET_CODE (x) != PLUS)
1807     return false;
1808 
1809   /* Handle <address>[index] represented with index-sum outermost */
1810 
1811   xfoo0 = XEXP (x, 0);
1812   xfoo1 = XEXP (x, 1);
1813 
1814   if (index_term_p (xfoo0, mode, strict)
1815       && nonindexed_address_p (xfoo1, strict))
1816     return true;
1817 
1818   if (index_term_p (xfoo1, mode, strict)
1819       && nonindexed_address_p (xfoo0, strict))
1820     return true;
1821 
1822   /* Handle offset(reg)[index] with offset added outermost */
1823 
1824   if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1825       || indexable_address_p (xfoo1, xfoo0, mode, strict))
1826     return true;
1827 
1828   return false;
1829 }
1830 
1831 /* Return true if x (a legitimate address expression) has an effect that
1832    depends on the machine mode it is used for.  On the VAX, the predecrement
1833    and postincrement address depend thus (the amount of decrement or
1834    increment being the length of the operand) and all indexed address depend
1835    thus (because the index scale factor is the length of the operand).  */
1836 
1837 static bool
vax_mode_dependent_address_p(const_rtx x,addr_space_t as ATTRIBUTE_UNUSED)1838 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
1839 {
1840   rtx xfoo0, xfoo1;
1841 
1842   /* Auto-increment cases are now dealt with generically in recog.c.  */
1843   if (GET_CODE (x) != PLUS)
1844     return false;
1845 
1846   xfoo0 = XEXP (x, 0);
1847   xfoo1 = XEXP (x, 1);
1848 
1849   if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1850     return false;
1851   if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1852     return false;
1853   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1854     return false;
1855   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1856     return false;
1857 
1858   return true;
1859 }
1860 
/* If X is a memory operand whose address cannot be used by the VAX
   add/sub-with-carry sequences (per illegal_addsub_di_memory_operand),
   load its address into a fresh pseudo and return a replacement DImode
   MEM through that register; otherwise return X unchanged.  */

static rtx
fixup_mathdi_operand (rtx x, machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      /* Under PIC, split a (CONST (PLUS sym off)) address so only the
	 symbol part is moved through the register; the offset is
	 re-added afterwards.  */
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, addr);
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}
1883 
/* Expand a DImode add (CODE == PLUS) or subtract (CODE == MINUS) of
   OPERANDS[1] and OPERANDS[2] into OPERANDS[0], choosing between the
   old-style pattern and the carry-based (qmath) SImode sequences.  */

void
vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
{
  /* Nonzero when the low word of the second source is zero, so only
     the high words need a real add/sub.  */
  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
  rtx temp;

  rtx (*gen_old_insn)(rtx, rtx, rtx);
  rtx (*gen_si_insn)(rtx, rtx, rtx);
  rtx (*gen_insn)(rtx, rtx, rtx);

  if (code == PLUS)
    {
      gen_old_insn = gen_adddi3_old;
      gen_si_insn = gen_addsi3;
      gen_insn = gen_adcdi3;
    }
  else if (code == MINUS)
    {
      gen_old_insn = gen_subdi3_old;
      gen_si_insn = gen_subsi3;
      gen_insn = gen_sbcdi3;
    }
  else
    gcc_unreachable ();

  /* If this is addition (thus operands are commutative) and if there is one
     addend that duplicates the destination, we want that addend to be the
     first addend.  */
  if (code == PLUS
      && rtx_equal_p (operands[0], operands[2])
      && !rtx_equal_p (operands[1], operands[2]))
    {
      temp = operands[2];
      operands[2] = operands[1];
      operands[1] = temp;
    }

  if (!TARGET_QMATH)
    {
      emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
    }
  else if (hi_only)
    {
      /* Only the high words change: copy the quad if needed, then add or
	 subtract on the high subword alone.  */
      if (!rtx_equal_p (operands[0], operands[1])
	  && (REG_P (operands[0]) && MEM_P (operands[1])))
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = operands[0];
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);
      operands[1] = fixup_mathdi_operand (operands[1], DImode);
      operands[2] = fixup_mathdi_operand (operands[2], DImode);

      if (!rtx_equal_p (operands[0], operands[1]))
	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
			  operand_subword (operands[1], 0, 0, DImode));

      emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
				 operand_subword (operands[1], 1, 0, DImode),
				 operand_subword (operands[2], 1, 0, DImode)));
    }
  else
    {
      /* If we are adding the same value together, that's really a multiply
	 by 2, and that's just a left shift of 1.  */
      if (rtx_equal_p (operands[1], operands[2]))
	{
	  gcc_assert (code != MINUS);
	  emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
	  return;
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);

      /* If an operand is the same as operand[0], use the operand[0] rtx
	 because fixup will return an equivalent rtx but not an equal one. */

      if (rtx_equal_p (operands[0], operands[1]))
	operands[1] = operands[0];
      else
	operands[1] = fixup_mathdi_operand (operands[1], DImode);

      if (rtx_equal_p (operands[0], operands[2]))
	operands[2] = operands[0];
      else
	operands[2] = fixup_mathdi_operand (operands[2], DImode);

      /* If we are subtracting not from ourselves [d = a - b], and because the
	 carry ops are two operand only, we would need to do a move prior to
	 the subtract.  And if d == b, we would need a temp otherwise
	 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
	 into d = -b, d += a.  Since -b can never overflow, even if b == d,
	 no temp is needed.

	 If we are doing addition, since the carry ops are two operand, if
	 we aren't adding to ourselves, move the first addend to the
	 destination first.  */

      gcc_assert (operands[1] != const0_rtx || code == MINUS);
      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
	{
	  if (code == MINUS && CONSTANT_P (operands[1]))
	    {
	      /* NOTE(review): TEMP is assigned here but never used in this
		 branch — looks like leftover scaffolding; confirm.  */
	      temp = gen_reg_rtx (DImode);
	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
	      code = PLUS;
	      gen_insn = gen_adcdi3;
	      operands[2] = operands[1];
	      operands[1] = operands[0];
	    }
	  else
	    emit_move_insn (operands[0], operands[1]);
	}

      /* Subtracting a constant will have been rewritten to an addition of the
	 negative of that constant before we get here.  */
      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
    }
}
2005 
2006 bool
adjacent_operands_p(rtx lo,rtx hi,machine_mode mode)2007 adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
2008 {
2009   HOST_WIDE_INT lo_offset;
2010   HOST_WIDE_INT hi_offset;
2011 
2012   if (GET_CODE (lo) != GET_CODE (hi))
2013     return false;
2014 
2015   if (REG_P (lo))
2016     return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2017   if (CONST_INT_P (lo))
2018     return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
2019   if (CONST_INT_P (lo))
2020     return mode != SImode;
2021 
2022   if (!MEM_P (lo))
2023     return false;
2024 
2025   if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2026     return false;
2027 
2028   lo = XEXP (lo, 0);
2029   hi = XEXP (hi, 0);
2030 
2031   if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2032     return rtx_equal_p (lo, hi);
2033 
2034   switch (GET_CODE (lo))
2035     {
2036     case REG:
2037     case SYMBOL_REF:
2038       lo_offset = 0;
2039       break;
2040     case CONST:
2041       lo = XEXP (lo, 0);
2042       /* FALLTHROUGH */
2043     case PLUS:
2044       if (!CONST_INT_P (XEXP (lo, 1)))
2045 	return false;
2046       lo_offset = INTVAL (XEXP (lo, 1));
2047       lo = XEXP (lo, 0);
2048       break;
2049     default:
2050       return false;
2051     }
2052 
2053   switch (GET_CODE (hi))
2054     {
2055     case REG:
2056     case SYMBOL_REF:
2057       hi_offset = 0;
2058       break;
2059     case CONST:
2060       hi = XEXP (hi, 0);
2061       /* FALLTHROUGH */
2062     case PLUS:
2063       if (!CONST_INT_P (XEXP (hi, 1)))
2064 	return false;
2065       hi_offset = INTVAL (XEXP (hi, 1));
2066       hi = XEXP (hi, 0);
2067       break;
2068     default:
2069       return false;
2070     }
2071 
2072   if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2073     return false;
2074 
2075   return rtx_equal_p (lo, hi)
2076 	 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2077 }
2078 
2079 /* Output assembler code for a block containing the constant parts
2080    of a trampoline, leaving space for the variable parts.  */
2081 
2082 /* On the VAX, the trampoline contains an entry mask and two instructions:
2083      .word NN
2084      movl $STATIC,r0   (store the functions static chain)
2085      jmp  *$FUNCTION   (jump to function code at address FUNCTION)  */
2086 
/* Emit the constant skeleton of the trampoline described in the comment
   above: a zero entry mask, then the movl and jmp instructions with
   zero placeholders that vax_trampoline_init patches at run time.
   The byte values are little-endian VAX instruction encodings --
   presumably 0xd0 = movl with 0x8f immediate mode and 0x17 = jmp with
   0x9f absolute-deferred mode; confirm against the VAX opcode table.  */

static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  assemble_aligned_integer (2, const0_rtx);   /* Entry mask placeholder.  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));   /* movl $imm,...  */
  assemble_aligned_integer (4, const0_rtx);   /* Static chain placeholder.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));   /* ...,rSTATIC  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));   /* jmp *$addr  */
  assemble_aligned_integer (4, const0_rtx);   /* Function address placeholder.  */
}
2097 
2098 /* We copy the register-mask from the function's pure code
2099    to the start of the trampoline.  */
2100 
2101 static void
vax_trampoline_init(rtx m_tramp,tree fndecl,rtx cxt)2102 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2103 {
2104   rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2105   rtx mem;
2106 
2107   emit_block_move (m_tramp, assemble_trampoline_template (),
2108 		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2109 
2110   mem = adjust_address (m_tramp, HImode, 0);
2111   emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2112 
2113   mem = adjust_address (m_tramp, SImode, 4);
2114   emit_move_insn (mem, cxt);
2115   mem = adjust_address (m_tramp, SImode, 11);
2116   emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
2117   emit_insn (gen_sync_istream ());
2118 }
2119 
2120 /* Value is the number of bytes of arguments automatically
2121    popped when returning from a subroutine call.
2122    FUNDECL is the declaration node of the function (as a tree),
2123    FUNTYPE is the data type of the function (as a tree),
2124    or for a library call it is an identifier node for the subroutine name.
2125    SIZE is the number of bytes of arguments passed on the stack.
2126 
2127    On the VAX, the RET insn pops a maximum of 255 args for any function.  */
2128 
2129 static int
vax_return_pops_args(tree fundecl ATTRIBUTE_UNUSED,tree funtype ATTRIBUTE_UNUSED,int size)2130 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2131 		      tree funtype ATTRIBUTE_UNUSED, int size)
2132 {
2133   return size > 255 * 4 ? 0 : size;
2134 }
2135 
2136 /* Define where to put the arguments to a function.
2137    Value is zero to push the argument on the stack,
2138    or a hard register in which to store the argument.
2139 
2140    MODE is the argument's machine mode.
2141    TYPE is the data type of the argument (as a tree).
2142     This is null for libcalls where that information may
2143     not be available.
2144    CUM is a variable of type CUMULATIVE_ARGS which gives info about
2145     the preceding args and about the function being called.
2146    NAMED is nonzero if this argument is a named parameter
2147     (otherwise it is an extra parameter matching an ellipsis).  */
2148 
2149 /* On the VAX all args are pushed.  */
2150 
static rtx
vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
		  machine_mode mode ATTRIBUTE_UNUSED,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX tells the middle end to push every argument on
     the stack -- no arguments are ever passed in registers on the VAX.  */
  return NULL_RTX;
}
2159 
2160 /* Update the data in CUM to advance over an argument of mode MODE and
2161    data type TYPE.  (TYPE is null for libcalls where that information
2162    may not be available.)  */
2163 
2164 static void
vax_function_arg_advance(cumulative_args_t cum_v,machine_mode mode,const_tree type,bool named ATTRIBUTE_UNUSED)2165 vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2166 			  const_tree type, bool named ATTRIBUTE_UNUSED)
2167 {
2168   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2169 
2170   *cum += (mode != BLKmode
2171 	   ? (GET_MODE_SIZE (mode) + 3) & ~3
2172 	   : (int_size_in_bytes (type) + 3) & ~3);
2173 }
2174