xref: /openbsd/gnu/usr.bin/gcc/gcc/config/s390/s390.c (revision 4e43c760)
1 /* Subroutines used for code generation on IBM S/390 and zSeries
2    Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3    Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4                   Ulrich Weigand (uweigand@de.ibm.com).
5 
6 This file is part of GNU CC.
7 
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12 
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING.  If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "except.h"
37 #include "function.h"
38 #include "recog.h"
39 #include "expr.h"
40 #include "reload.h"
41 #include "toplev.h"
42 #include "basic-block.h"
43 #include "integrate.h"
44 #include "ggc.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "debug.h"
48 #include "langhooks.h"
49 #include "optabs.h"
50 
/* Prototypes of the static target-hook implementations that are
   installed in the TARGET_* macros below.  */

static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int s390_adjust_priority PARAMS ((rtx, int));
static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
					     unsigned HOST_WIDE_INT));
static void s390_encode_section_info PARAMS ((tree, int));
static const char *s390_strip_name_encoding PARAMS ((const char *));
static bool s390_cannot_force_const_mem PARAMS ((rtx));
static void s390_init_builtins PARAMS ((void));
static rtx s390_expand_builtin PARAMS ((tree, rtx, rtx,
					enum machine_mode, int));
static void s390_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
					  HOST_WIDE_INT, tree));
64 
/* Initialize the GCC target structure: override the default hooks
   with the s390-specific implementations declared above.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

/* The s390 assembler does not accept parentheses in expressions.  */
#undef  TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef  TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef	TARGET_ASM_SELECT_RTX_SECTION
#define	TARGET_ASM_SELECT_RTX_SECTION  s390_select_rtx_section

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST s390_adjust_cost

#undef  TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority

#undef	TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING s390_strip_name_encoding

/* Advertise TLS support only when the assembler can handle it.  */
#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

struct gcc_target targetm = TARGET_INITIALIZER;
110 
/* Nonzero once register allocation and reload have finished
   (defined in reload1.c).  */
extern int reload_completed;

/* The alias set for prologue/epilogue register save/restore.  */
static int s390_sr_alias_set = 0;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* The encoding characters for the four TLS models present in ELF.
   NOTE(review): presumably indexed by enum tls_model (global-dynamic,
   local-dynamic, initial-exec, local-exec) with a leading blank for
   "no TLS model" -- verify against the users of this table.  */
static char const tls_model_chars[] = " GLil";
122 
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;	/* Base register, or NULL.  */
  rtx indx;	/* Index register, or NULL.  */
  rtx disp;	/* Displacement constant, or NULL.  */
  int pointer;	/* Nonzero if the address is known to be a pointer.  */
};
139 
/* Define the structure for the machine field in struct function.
   Holds per-function backend state; allocated by
   s390_init_machine_status and garbage-collected via GTY.  */

struct machine_function GTY(())
{
  /* Label of start of initial literal pool.  */
  rtx literal_pool_label;

  /* Set, if some of the fprs 8-15 need to be saved (64 bit abi).  */
  int save_fprs_p;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;
};
161 
/* Prototypes of local helper functions defined later in this file.  */

static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
static int s390_branch_condition_mask PARAMS ((rtx));
static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
static int check_mode PARAMS ((rtx, enum machine_mode *));
static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
static rtx get_thread_pointer PARAMS ((void));
static rtx legitimize_tls_address PARAMS ((rtx, rtx));
static const char *get_some_local_dynamic_name PARAMS ((void));
static int get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
static int reg_used_in_mem_p PARAMS ((int, rtx));
static int addr_generation_dependency_p PARAMS ((rtx, rtx));
static int s390_split_branches PARAMS ((rtx, bool *));
static void find_constant_pool_ref PARAMS ((rtx, rtx *));
static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
static int find_base_register_in_addr PARAMS ((struct s390_address *));
static bool find_base_register_ref PARAMS ((rtx));
static void replace_base_register_ref PARAMS ((rtx *, rtx));
static void s390_optimize_prolog PARAMS ((int));
static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
static int find_unused_clobbered_reg PARAMS ((void));
static void s390_frame_info PARAMS ((void));
static rtx save_fpr PARAMS ((rtx, int, int));
static rtx restore_fpr PARAMS ((rtx, int, int));
static rtx save_gprs PARAMS ((rtx, int, int, int));
static rtx restore_gprs PARAMS ((rtx, int, int, int));
static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
static struct machine_function * s390_init_machine_status PARAMS ((void));
190 
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static int
s390_match_ccmode_set (set, req_mode)
     rtx set;
     enum machine_mode req_mode;
{
  enum machine_mode set_mode;

  if (GET_CODE (set) != SET)
    abort ();

  /* A SET of anything other than the CC register trivially matches.  */
  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    /* These CC modes must be requested exactly.  */
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    /* CCZmode (equality only) satisfies any of the richer
       signed/unsigned/test requests.  */
    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
	  && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    /* The add-with-positive/negative-constant modes satisfy a
       generic CCAmode request.  */
    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      abort ();
    }

  /* Finally, source and destination CC modes must agree.  */
  return (GET_MODE (SET_SRC (set)) == set_mode);
}
243 
244 /* Return true if every SET in INSN that sets the CC register
245    has source and destination with matching CC modes and that
246    CC mode is at least as constrained as REQ_MODE.
247    If REQ_MODE is VOIDmode, always return false.  */
248 
249 int
s390_match_ccmode(insn,req_mode)250 s390_match_ccmode (insn, req_mode)
251      rtx insn;
252      enum machine_mode req_mode;
253 {
254   int i;
255 
256   /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
257   if (req_mode == VOIDmode)
258     return 0;
259 
260   if (GET_CODE (PATTERN (insn)) == SET)
261     return s390_match_ccmode_set (PATTERN (insn), req_mode);
262 
263   if (GET_CODE (PATTERN (insn)) == PARALLEL)
264       for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
265         {
266           rtx set = XVECEXP (PATTERN (insn), 0, i);
267           if (GET_CODE (set) == SET)
268             if (!s390_match_ccmode_set (set, req_mode))
269               return 0;
270         }
271 
272   return 1;
273 }
274 
275 /* If a test-under-mask instruction can be used to implement
276    (compare (and ... OP1) OP2), return the CC mode required
277    to do that.  Otherwise, return VOIDmode.
278    MIXED is true if the instruction can distinguish between
279    CC1 and CC2 for mixed selected bits (TMxx), it is false
280    if the instruction cannot (TM).  */
281 
282 enum machine_mode
s390_tm_ccmode(op1,op2,mixed)283 s390_tm_ccmode (op1, op2, mixed)
284      rtx op1;
285      rtx op2;
286      int mixed;
287 {
288   int bit0, bit1;
289 
290   /* ??? Fixme: should work on CONST_DOUBLE as well.  */
291   if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
292     return VOIDmode;
293 
294   /* Selected bits all zero: CC0.  */
295   if (INTVAL (op2) == 0)
296     return CCTmode;
297 
298   /* Selected bits all one: CC3.  */
299   if (INTVAL (op2) == INTVAL (op1))
300     return CCT3mode;
301 
302   /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  */
303   if (mixed)
304     {
305       bit1 = exact_log2 (INTVAL (op2));
306       bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
307       if (bit0 != -1 && bit1 != -1)
308         return bit0 > bit1 ? CCT1mode : CCT2mode;
309     }
310 
311   return VOIDmode;
312 }
313 
/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (code, op0, op1)
     enum rtx_code code;
     rtx op0;
     rtx op1;
{
  switch (code)
    {
      case EQ:
      case NE:
	/* Adding a 'K'-class constant allows the add-immediate CC mode.  */
	if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
	  return CCAPmode;
	if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
	    || GET_CODE (op1) == NEG)
	  return CCLmode;

	if (GET_CODE (op0) == AND)
	  {
	    /* Check whether we can potentially do it via TM.  */
	    enum machine_mode ccmode;
	    ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
	    if (ccmode != VOIDmode)
	      {
		/* Relax CCTmode to CCZmode to allow fall-back to AND
		   if that turns out to be beneficial.  */
	        return ccmode == CCTmode ? CCZmode : ccmode;
	      }
	  }

	/* Comparing a subword register against its all-ones value
	   can be done via test under mask (CC3).  */
	if (register_operand (op0, HImode)
	    && GET_CODE (op1) == CONST_INT
	    && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
	  return CCT3mode;
	if (register_operand (op0, QImode)
	    && GET_CODE (op1) == CONST_INT
	    && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
	  return CCT3mode;

	return CCZmode;

      case LE:
      case LT:
      case GE:
      case GT:
	  /* The sign of an added 'K' constant selects between the
	     positive and negative add-immediate CC modes.  */
	  if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
            {
	      if (INTVAL (XEXP((op0), 1)) < 0)
	        return CCANmode;
              else
	        return CCAPmode;
	    }
	  /* fall through */
      case UNORDERED:
      case ORDERED:
      case UNEQ:
      case UNLE:
      case UNLT:
      case UNGE:
      case UNGT:
      case LTGT:
	/* An extended first operand compared against a non-constant
	   uses the "reversed" signed CC mode.  */
	if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
	    && GET_CODE (op1) != CONST_INT)
	  return CCSRmode;
	return CCSmode;

      case LTU:
      case GEU:
	if (GET_CODE (op0) == PLUS)
	  return CCL1mode;

	if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
	    && GET_CODE (op1) != CONST_INT)
	  return CCURmode;
	return CCUmode;

      case LEU:
      case GTU:
	if (GET_CODE (op0) == MINUS)
	  return CCL2mode;

	if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
	    && GET_CODE (op1) != CONST_INT)
	  return CCURmode;
	return CCUmode;

      default:
	abort ();
    }
}
408 
/* Return branch condition mask to implement a branch
   specified by CODE.  */

static int
s390_branch_condition_mask (code)
    rtx code;
{
  /* Bits selecting condition-code values CC0..CC3 in the 4-bit mask
     field of a branch-on-condition instruction.  */
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  /* CODE must compare the CC register against zero.  */
  if (GET_CODE (XEXP (code, 0)) != REG
      || REGNO (XEXP (code, 0)) != CC_REGNUM
      || XEXP (code, 1) != const0_rtx)
    abort ();

  /* Dispatch on the CC mode; within each mode, only the comparison
     codes meaningful for that mode are accepted.  */
  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
	case NE:	return CC1 | CC2 | CC3;
	default:
	  abort ();
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC1;
	case NE:	return CC0 | CC2 | CC3;
	default:
	  abort ();
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC2;
	case NE:	return CC0 | CC1 | CC3;
	default:
	  abort ();
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC3;
	case NE:	return CC0 | CC1 | CC2;
	default:
	  abort ();
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0 | CC2;
	case NE:	return CC1 | CC3;
	default:
	  abort ();
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
	case LTU:	return CC2 | CC3;  /* carry */
	case GEU:	return CC0 | CC1;  /* no carry */
	default:
	  abort ();
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
	case GTU:	return CC0 | CC1;  /* borrow */
	case LEU:	return CC2 | CC3;  /* no borrow */
	default:
	  abort ();
        }
      break;

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LTU:	return CC1;
        case GTU:	return CC2;
        case LEU:	return CC0 | CC1;
        case GEU:	return CC0 | CC2;
	default:
	  abort ();
        }
      break;

    /* The "R" modes describe comparisons with swapped operands,
       so CC1 and CC2 change places relative to the base mode.  */
    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LTU:	return CC2;
        case GTU:	return CC1;
        case LEU:	return CC0 | CC2;
        case GEU:	return CC0 | CC1;
	default:
	  abort ();
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1 | CC3;
        case GT:	return CC2;
        case LE:	return CC0 | CC1 | CC3;
        case GE:	return CC0 | CC2;
	default:
	  abort ();
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2 | CC3;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2 | CC3;
	default:
	  abort ();
        }
      break;

    /* CC3 means "unordered" for the signed (floating-point capable)
       modes.  */
    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2;
	case UNORDERED:	return CC3;
	case ORDERED:	return CC0 | CC1 | CC2;
	case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC1 | CC3;
        case UNGT:	return CC2 | CC3;
        case UNLE:	return CC0 | CC1 | CC3;
        case UNGE:	return CC0 | CC2 | CC3;
	case LTGT:	return CC1 | CC2;
	default:
	  abort ();
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LT:	return CC2;
        case GT:	return CC1;
        case LE:	return CC0 | CC2;
        case GE:	return CC0 | CC1;
	case UNORDERED:	return CC3;
	case ORDERED:	return CC0 | CC2 | CC1;
	case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC2 | CC3;
        case UNGT:	return CC1 | CC3;
        case UNLE:	return CC0 | CC2 | CC3;
        case UNGE:	return CC0 | CC1 | CC3;
	case LTGT:	return CC2 | CC1;
	default:
	  abort ();
        }
      break;

    default:
      abort ();
    }
}
602 
603 /* If INV is false, return assembler mnemonic string to implement
604    a branch specified by CODE.  If INV is true, return mnemonic
605    for the corresponding inverted branch.  */
606 
607 static const char *
s390_branch_condition_mnemonic(code,inv)608 s390_branch_condition_mnemonic (code, inv)
609      rtx code;
610      int inv;
611 {
612   static const char *const mnemonic[16] =
613     {
614       NULL, "o", "h", "nle",
615       "l", "nhe", "lh", "ne",
616       "e", "nlh", "he", "nl",
617       "le", "nh", "no", NULL
618     };
619 
620   int mask = s390_branch_condition_mask (code);
621 
622   if (inv)
623     mask ^= 15;
624 
625   if (mask < 1 || mask > 14)
626     abort ();
627 
628   return mnemonic[mask];
629 }
630 
/* If OP is an integer constant of mode MODE with exactly one
   HImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, all HImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */

int
s390_single_hi (op, mode, def)
     rtx op;
     enum machine_mode mode;
     int def;
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode) / 2;
      int i, part = -1;

      /* Walk the 16-bit subparts from least to most significant,
         remembering the single one that differs from DEF.  */
      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 16;

          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;	/* A second differing subpart: fail.  */
              else
                part = i;
            }
        }

      /* Subparts are numbered from the most significant end.  */
      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      /* Same scan for a double-word integer constant; switch from
         the low to the high word once the low word is exhausted.  */
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode) / 2;
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
          else if (i == HOST_BITS_PER_WIDE_INT / 16)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 16;

          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  /* Not an integer constant at all.  */
  return -1;
}
697 
698 /* Extract the HImode part number PART from integer
699    constant OP of mode MODE.  */
700 
701 int
s390_extract_hi(op,mode,part)702 s390_extract_hi (op, mode, part)
703     rtx op;
704     enum machine_mode mode;
705     int part;
706 {
707   int n_parts = GET_MODE_SIZE (mode) / 2;
708   if (part < 0 || part >= n_parts)
709     abort();
710   else
711     part = n_parts - 1 - part;
712 
713   if (GET_CODE (op) == CONST_INT)
714     {
715       unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
716       return ((value >> (16 * part)) & 0xffff);
717     }
718   else if (GET_CODE (op) == CONST_DOUBLE
719            && GET_MODE (op) == VOIDmode)
720     {
721       unsigned HOST_WIDE_INT value;
722       if (part < HOST_BITS_PER_WIDE_INT / 16)
723         value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
724       else
725         value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
726         part -= HOST_BITS_PER_WIDE_INT / 16;
727 
728       return ((value >> (16 * part)) & 0xffff);
729     }
730 
731   abort ();
732 }
733 
/* If OP is an integer constant of mode MODE with exactly one
   QImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, all QImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */

int
s390_single_qi (op, mode, def)
     rtx op;
     enum machine_mode mode;
     int def;
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode);
      int i, part = -1;

      /* Walk the 8-bit subparts from least to most significant,
         remembering the single one that differs from DEF.  */
      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 8;

          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;	/* A second differing subpart: fail.  */
              else
                part = i;
            }
        }

      /* Subparts are numbered from the most significant end.  */
      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      /* Same scan for a double-word integer constant; switch from
         the low to the high word once the low word is exhausted.  */
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode);
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
          else if (i == HOST_BITS_PER_WIDE_INT / 8)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 8;

          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  /* Not an integer constant at all.  */
  return -1;
}
800 
801 /* Extract the QImode part number PART from integer
802    constant OP of mode MODE.  */
803 
804 int
s390_extract_qi(op,mode,part)805 s390_extract_qi (op, mode, part)
806     rtx op;
807     enum machine_mode mode;
808     int part;
809 {
810   int n_parts = GET_MODE_SIZE (mode);
811   if (part < 0 || part >= n_parts)
812     abort();
813   else
814     part = n_parts - 1 - part;
815 
816   if (GET_CODE (op) == CONST_INT)
817     {
818       unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
819       return ((value >> (8 * part)) & 0xff);
820     }
821   else if (GET_CODE (op) == CONST_DOUBLE
822            && GET_MODE (op) == VOIDmode)
823     {
824       unsigned HOST_WIDE_INT value;
825       if (part < HOST_BITS_PER_WIDE_INT / 8)
826         value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
827       else
828         value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
829         part -= HOST_BITS_PER_WIDE_INT / 8;
830 
831       return ((value >> (8 * part)) & 0xff);
832     }
833 
834   abort ();
835 }
836 
837 /* Check whether we can (and want to) split a double-word
838    move in mode MODE from SRC to DST into two single-word
839    moves, moving the subword FIRST_SUBWORD first.  */
840 
841 bool
s390_split_ok_p(dst,src,mode,first_subword)842 s390_split_ok_p (dst, src, mode, first_subword)
843      rtx dst;
844      rtx src;
845      enum machine_mode mode;
846      int first_subword;
847 {
848   /* Floating point registers cannot be split.  */
849   if (FP_REG_P (src) || FP_REG_P (dst))
850     return false;
851 
852   /* We don't need to split if operands are directly accessable.  */
853   if (s_operand (src, mode) || s_operand (dst, mode))
854     return false;
855 
856   /* Non-offsettable memory references cannot be split.  */
857   if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
858       || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
859     return false;
860 
861   /* Moving the first subword must not clobber a register
862      needed to move the second subword.  */
863   if (register_operand (dst, mode))
864     {
865       rtx subreg = operand_subword (dst, first_subword, 0, mode);
866       if (reg_overlap_mentioned_p (subreg, src))
867         return false;
868     }
869 
870   return true;
871 }
872 
873 
/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;
}
894 
/* Process target-specific option overrides after all options
   have been parsed.  */

void
override_options ()
{
  /* Acquire a unique set number for our register saves and restores.  */
  s390_sr_alias_set = new_alias_set ();

  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;
}
904 
/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,	/* gpr 0 - 3: r0 cannot be an address base.  */
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,	/* gpr 4 - 7  */
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,	/* gpr 8 - 11 */
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,	/* gpr 12 - 15 */
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,		/* fpr 0 - 3  */
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,		/* fpr 4 - 7  */
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,		/* fpr 8 - 11 */
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,		/* fpr 12 - 15 */
  ADDR_REGS,    NO_REGS,   ADDR_REGS			/* NOTE(review): presumably the fake
							   argument pointer, condition code, and
							   return address registers -- confirm
							   against the register layout in s390.h.  */
};
918 
919 
920 /* Return true if OP a (const_int 0) operand.
921    OP is the current operation.
922    MODE is the current operation mode.  */
923 
924 int
const0_operand(op,mode)925 const0_operand (op, mode)
926      register rtx op;
927      enum machine_mode mode;
928 {
929   return op == CONST0_RTX (mode);
930 }
931 
932 /* Return true if OP is constant.
933    OP is the current operation.
934    MODE is the current operation mode.  */
935 
936 int
consttable_operand(op,mode)937 consttable_operand (op, mode)
938      rtx op;
939      enum machine_mode mode ATTRIBUTE_UNUSED;
940 {
941   return CONSTANT_P (op);
942 }
943 
944 /* Return true if the mode of operand OP matches MODE.
945    If MODE is set to VOIDmode, set it to the mode of OP.  */
946 
947 static int
check_mode(op,mode)948 check_mode (op, mode)
949      register rtx op;
950      enum machine_mode *mode;
951 {
952   if (*mode == VOIDmode)
953       *mode = GET_MODE (op);
954   else
955   {
956     if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
957        return 0;
958   }
959   return 1;
960 }
961 
962 /* Return true if OP a valid operand for the LARL instruction.
963    OP is the current operation.
964    MODE is the current operation mode.  */
965 
966 int
larl_operand(op,mode)967 larl_operand (op, mode)
968      register rtx op;
969      enum machine_mode mode;
970 {
971   if (! check_mode (op, &mode))
972     return 0;
973 
974   /* Allow labels and local symbols.  */
975   if (GET_CODE (op) == LABEL_REF)
976     return 1;
977   if (GET_CODE (op) == SYMBOL_REF
978       && XSTR (op, 0)[0] != '@'
979       && !tls_symbolic_operand (op)
980       && (!flag_pic || SYMBOL_REF_FLAG (op)
981           || CONSTANT_POOL_ADDRESS_P (op)))
982     return 1;
983 
984   /* Everything else must have a CONST, so strip it.  */
985   if (GET_CODE (op) != CONST)
986     return 0;
987   op = XEXP (op, 0);
988 
989   /* Allow adding *even* constants.  */
990   if (GET_CODE (op) == PLUS)
991     {
992       if (GET_CODE (XEXP (op, 1)) != CONST_INT
993           || (INTVAL (XEXP (op, 1)) & 1) != 0)
994         return 0;
995       op = XEXP (op, 0);
996     }
997 
998   /* Labels and local symbols allowed here as well.  */
999   if (GET_CODE (op) == LABEL_REF)
1000     return 1;
1001   if (GET_CODE (op) == SYMBOL_REF
1002       && XSTR (op, 0)[0] != '@'
1003       && !tls_symbolic_operand (op)
1004       && (!flag_pic || SYMBOL_REF_FLAG (op)
1005           || CONSTANT_POOL_ADDRESS_P (op)))
1006     return 1;
1007 
1008   /* Now we must have a @GOTENT offset or @PLT stub
1009      or an @INDNTPOFF TLS offset.  */
1010   if (GET_CODE (op) == UNSPEC
1011       && XINT (op, 1) == 111)
1012     return 1;
1013   if (GET_CODE (op) == UNSPEC
1014       && XINT (op, 1) == 113)
1015     return 1;
1016   if (GET_CODE (op) == UNSPEC
1017       && XINT (op, 1) == UNSPEC_INDNTPOFF)
1018     return 1;
1019 
1020   return 0;
1021 }
1022 
/* Helper routine to implement s_operand and s_imm_operand.
   OP is the current operation.
   MODE is the current operation mode.
   ALLOW_IMMEDIATE specifies whether immediate operands should
   be accepted or not.  */

static int
general_s_operand (op, mode, allow_immediate)
     register rtx op;
     enum machine_mode mode;
     int allow_immediate;
{
  struct s390_address addr;

  /* Call general_operand first, so that we don't have to
     check for many special cases.  */
  if (!general_operand (op, mode))
    return 0;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
      /* Constants that we are sure will be forced to the
         literal pool in reload are OK as s-operand.  Note
	 that we cannot call s390_preferred_reload_class here
	 because it might not be known yet at this point
	 whether the current function is a leaf or not.  */
      case CONST_INT:
      case CONST_DOUBLE:
	if (!allow_immediate || reload_completed)
	  break;
	if (!legitimate_reload_constant_p (op))
	  return 1;
	/* On 31-bit, even legitimate constants end up in the pool.  */
	if (!TARGET_64BIT)
	  return 1;
	break;

      /* Memory operands are OK unless they already use an
	 index register.  */
      case MEM:
	if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
	  return 1;
	if (s390_decompose_address (XEXP (op, 0), &addr)
	    && !addr.indx)
	  return 1;
	break;

      default:
	break;
    }

  return 0;
}
1082 
1083 /* Return true if OP is a valid S-type operand.
1084    OP is the current operation.
1085    MODE is the current operation mode.  */
1086 
1087 int
s_operand(op,mode)1088 s_operand (op, mode)
1089      register rtx op;
1090      enum machine_mode mode;
1091 {
1092   return general_s_operand (op, mode, 0);
1093 }
1094 
1095 /* Return true if OP is a valid S-type operand or an immediate
1096    operand that can be addressed as S-type operand by forcing
1097    it into the literal pool.
1098    OP is the current operation.
1099    MODE is the current operation mode.  */
1100 
1101 int
s_imm_operand(op,mode)1102 s_imm_operand (op, mode)
1103      register rtx op;
1104      enum machine_mode mode;
1105 {
1106   return general_s_operand (op, mode, 1);
1107 }
1108 
1109 /* Return true if OP is a valid operand for a 'Q' constraint.
1110    This differs from s_operand in that only memory operands
1111    without index register are accepted, nothing else.  */
1112 
1113 int
q_constraint(op)1114 q_constraint (op)
1115      register rtx op;
1116 {
1117   struct s390_address addr;
1118 
1119   if (GET_CODE (op) != MEM)
1120     return 0;
1121 
1122   if (!s390_decompose_address (XEXP (op, 0), &addr))
1123     return 0;
1124 
1125   if (addr.indx)
1126     return 0;
1127 
1128   return 1;
1129 }
1130 
1131 /* Return the cost of an address rtx ADDR.  */
1132 
1133 int
s390_address_cost(addr)1134 s390_address_cost (addr)
1135      rtx addr;
1136 {
1137   struct s390_address ad;
1138   if (!s390_decompose_address (addr, &ad))
1139     return 1000;
1140 
1141   return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1142 }
1143 
1144 /* Return true if OP is a valid operand for the BRAS instruction.
1145    OP is the current operation.
1146    MODE is the current operation mode.  */
1147 
1148 int
bras_sym_operand(op,mode)1149 bras_sym_operand (op, mode)
1150      register rtx op;
1151      enum machine_mode mode ATTRIBUTE_UNUSED;
1152 {
1153   register enum rtx_code code = GET_CODE (op);
1154 
1155   /* Allow SYMBOL_REFs.  */
1156   if (code == SYMBOL_REF)
1157     return 1;
1158 
1159   /* Allow @PLT stubs.  */
1160   if (code == CONST
1161       && GET_CODE (XEXP (op, 0)) == UNSPEC
1162       && XINT (XEXP (op, 0), 1) == 113)
1163     return 1;
1164   return 0;
1165 }
1166 
1167 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1168    otherwise return 0.  */
1169 
1170 int
tls_symbolic_operand(op)1171 tls_symbolic_operand (op)
1172      register rtx op;
1173 {
1174   const char *symbol_str;
1175 
1176   if (GET_CODE (op) != SYMBOL_REF)
1177     return 0;
1178   symbol_str = XSTR (op, 0);
1179 
1180   if (symbol_str[0] != '%')
1181     return 0;
1182   return strchr (tls_model_chars, symbol_str[1]) - tls_model_chars;
1183 }
1184 
/* Return true if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.

   Matches a PARALLEL of SETs that load consecutive hard registers
   dest_regno, dest_regno+1, ... from consecutive memory words at
   offsets off, off+UNITS_PER_WORD, ... from a common base register.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;


  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
	   && GET_CODE (XEXP (src_addr, 0)) == REG
	   && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  /* NOTE(review): frame/arg pointer bases are rejected, presumably
     because register elimination would change the displacements and
     break the consecutive-offset requirement — confirm.  */
  if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
    return 0;

  /* Each further element must load the next register from the next
     word at the same base.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != Pmode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != Pmode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
	     != off + i * UNITS_PER_WORD)
	return 0;
    }

  return 1;
}
1248 
/* Return true if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.

   Mirror image of load_multiple_operation: consecutive hard registers
   src_regno, src_regno+1, ... stored to consecutive memory words at
   offsets off, off+UNITS_PER_WORD, ... from a common base register.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
	   && GET_CODE (XEXP (dest_addr, 0)) == REG
	   && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return 0;

  /* NOTE(review): frame/arg pointer bases rejected for the same
     (presumed) elimination reason as in load_multiple_operation.  */
  if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
    return 0;

  /* Each further element must store the next register to the next
     word at the same base.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != Pmode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != Pmode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
	     != off + i * UNITS_PER_WORD)
	return 0;
    }
  return 1;
}
1310 
1311 
1312 /* Return true if OP contains a symbol reference */
1313 
1314 int
symbolic_reference_mentioned_p(op)1315 symbolic_reference_mentioned_p (op)
1316      rtx op;
1317 {
1318   register const char *fmt;
1319   register int i;
1320 
1321   if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1322     return 1;
1323 
1324   fmt = GET_RTX_FORMAT (GET_CODE (op));
1325   for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1326     {
1327       if (fmt[i] == 'E')
1328 	{
1329 	  register int j;
1330 
1331 	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1332 	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1333 	      return 1;
1334 	}
1335 
1336       else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1337 	return 1;
1338     }
1339 
1340   return 0;
1341 }
1342 
1343 /* Return true if OP contains a reference to a thread-local symbol.  */
1344 
1345 int
tls_symbolic_reference_mentioned_p(op)1346 tls_symbolic_reference_mentioned_p (op)
1347      rtx op;
1348 {
1349   register const char *fmt;
1350   register int i;
1351 
1352   if (GET_CODE (op) == SYMBOL_REF)
1353     return tls_symbolic_operand (op);
1354 
1355   fmt = GET_RTX_FORMAT (GET_CODE (op));
1356   for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1357     {
1358       if (fmt[i] == 'E')
1359 	{
1360 	  register int j;
1361 
1362 	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1363 	    if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1364 	      return 1;
1365 	}
1366 
1367       else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
1368 	return 1;
1369     }
1370 
1371   return 0;
1372 }
1373 
1374 
1375 /* Return true if OP is a legitimate general operand when
1376    generating PIC code.  It is given that flag_pic is on
1377    and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */
1378 
1379 int
legitimate_pic_operand_p(op)1380 legitimate_pic_operand_p (op)
1381      register rtx op;
1382 {
1383   /* Accept all non-symbolic constants.  */
1384   if (!SYMBOLIC_CONST (op))
1385     return 1;
1386 
1387   /* Reject everything else; must be handled
1388      via emit_symbolic_move.  */
1389   return 0;
1390 }
1391 
/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_constant_p (op)
     register rtx op;
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Accept immediate LARL operands.  */
  if (TARGET_64BIT && larl_operand (op, VOIDmode))
    return 1;

  /* Thread-local symbols are never legal constants.  This is
     so that emit_call knows that computing such addresses
     might require a function call.  */
  if (TLS_SYMBOLIC_CONST (op))
    return 0;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_symbolic_move.  */
  if (flag_pic)
    return 1;

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return 0;
}
1423 
/* Determine if it's legal to put X into the constant pool.  This
   is not possible if X contains the address of a symbol that is
   not constant (TLS) or not known at final link time (PIC).
   Returns true if X must NOT be forced into the pool.  */

static bool
s390_cannot_force_const_mem (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Accept all non-symbolic constants.  */
      return false;

    case LABEL_REF:
      /* Labels are OK iff we are non-PIC.  */
      return flag_pic != 0;

    case SYMBOL_REF:
      /* 'Naked' TLS symbol references are never OK,
         non-TLS symbols are OK iff we are non-PIC.  */
      if (tls_symbolic_operand (x))
	return true;
      else
	return flag_pic != 0;

    case CONST:
      return s390_cannot_force_const_mem (XEXP (x, 0));
    case PLUS:
    case MINUS:
      /* A compound is poolable only if both halves are.  */
      return s390_cannot_force_const_mem (XEXP (x, 0))
	     || s390_cannot_force_const_mem (XEXP (x, 1));

    case UNSPEC:
      switch (XINT (x, 1))
	{
	/* Only lt-relative or GOT-relative UNSPECs are OK.
	   100 is the literal-pool-relative wrapper generated by
	   s390_decompose_address / legitimize_pic_address; the other
	   bare numbers (104, 112, 114) predate named UNSPEC_*
	   constants — NOTE(review): verify against s390.md.  */
	case 100:
	case 104:
	case 112:
	case 114:
	case UNSPEC_TLSGD:
	case UNSPEC_TLSLDM:
	case UNSPEC_NTPOFF:
	case UNSPEC_DTPOFF:
	case UNSPEC_GOTNTPOFF:
	case UNSPEC_INDNTPOFF:
	  return false;

	default:
	  return true;
	}
      break;

    default:
      /* Any other code reaching here indicates a caller bug.  */
      abort ();
    }
}
1483 
1484 /* Returns true if the constant value OP is a legitimate general
1485    operand during and after reload.  The difference to
1486    legitimate_constant_p is that this function will not accept
1487    a constant that would need to be forced to the literal pool
1488    before it can be used as operand.  */
1489 
1490 int
legitimate_reload_constant_p(op)1491 legitimate_reload_constant_p (op)
1492      register rtx op;
1493 {
1494   /* Accept l(g)hi operands.  */
1495   if (GET_CODE (op) == CONST_INT
1496       && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1497     return 1;
1498 
1499   /* Accept lliXX operands.  */
1500   if (TARGET_64BIT
1501       && s390_single_hi (op, DImode, 0) >= 0)
1502   return 1;
1503 
1504   /* Accept larl operands.  */
1505   if (TARGET_64BIT
1506       && larl_operand (op, VOIDmode))
1507     return 1;
1508 
1509   /* Everything else cannot be handled without reload.  */
1510   return 0;
1511 }
1512 
/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
   return the class of reg to actually use.  Returning NO_REGS rejects
   the reload into a register altogether.  */

enum reg_class
s390_preferred_reload_class (op, class)
     rtx op;
     enum reg_class class;
{
  /* This can happen if a floating point constant is being
     reloaded into an integer register.  Leave well alone.  */
  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
      && class != FP_REGS)
    return class;

  switch (GET_CODE (op))
    {
      /* Constants we cannot reload must be forced into the
	 literal pool.  */

      case CONST_DOUBLE:
      case CONST_INT:
	if (legitimate_reload_constant_p (op))
	  return class;
	else
	  return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
	 it is most likely being used as an address, so
	 prefer ADDR_REGS.  If 'class' is not a superset
	 of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case PLUS:
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
	if (reg_class_subset_p (ADDR_REGS, class))
          return ADDR_REGS;
	else
	  return NO_REGS;

      default:
	break;
    }

  /* Anything else: keep the requested class.  */
  return class;
}
1558 
1559 /* Return the register class of a scratch register needed to
1560    load IN into a register of class CLASS in MODE.
1561 
1562    We need a temporary when loading a PLUS expression which
1563    is not a legitimate operand of the LOAD ADDRESS instruction.  */
1564 
1565 enum reg_class
s390_secondary_input_reload_class(class,mode,in)1566 s390_secondary_input_reload_class (class, mode, in)
1567      enum reg_class class ATTRIBUTE_UNUSED;
1568      enum machine_mode mode;
1569      rtx in;
1570 {
1571   if (s390_plus_operand (in, mode))
1572     return ADDR_REGS;
1573 
1574   return NO_REGS;
1575 }
1576 
1577 /* Return the register class of a scratch register needed to
1578    store a register of class CLASS in MODE into OUT:
1579 
1580    We need a temporary when storing a double-word to a
1581    non-offsettable memory address.  */
1582 
1583 enum reg_class
s390_secondary_output_reload_class(class,mode,out)1584 s390_secondary_output_reload_class (class, mode, out)
1585      enum reg_class class;
1586      enum machine_mode mode;
1587      rtx out;
1588 {
1589   if ((TARGET_64BIT ? mode == TImode
1590                     : (mode == DImode || mode == DFmode))
1591       && reg_classes_intersect_p (GENERAL_REGS, class)
1592       && GET_CODE (out) == MEM
1593       && !offsettable_memref_p (out)
1594       && !s_operand (out, VOIDmode))
1595     return ADDR_REGS;
1596 
1597   return NO_REGS;
1598 }
1599 
1600 /* Return true if OP is a PLUS that is not a legitimate
1601    operand for the LA instruction.
1602    OP is the current operation.
1603    MODE is the current operation mode.  */
1604 
1605 int
s390_plus_operand(op,mode)1606 s390_plus_operand (op, mode)
1607      register rtx op;
1608      enum machine_mode mode;
1609 {
1610   if (!check_mode (op, &mode) || mode != Pmode)
1611     return FALSE;
1612 
1613   if (GET_CODE (op) != PLUS)
1614     return FALSE;
1615 
1616   if (legitimate_la_operand_p (op))
1617     return FALSE;
1618 
1619   return TRUE;
1620 }
1621 
/* Generate code to load SRC, which is PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (target, src, scratch)
     register rtx target;
     register rtx src;
     register rtx scratch;
{
  rtx sum1, sum2;
  struct s390_address ad;

  /* src must be a PLUS; get its two operands.  */
  if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
    abort ();

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));
  src = gen_rtx_PLUS (Pmode, sum1, sum2);

  /* If the address is already strictly valid, there's nothing to do.  */
  if (!s390_decompose_address (src, &ad)
      || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
      || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
    {
      /* Otherwise, one of the operands cannot be an address register;
         we reload its value into the scratch register.  Only hard
         regs 1..15 qualify: r0 cannot serve in an address.  */
      if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
	{
	  emit_move_insn (scratch, sum1);
	  sum1 = scratch;
	}
      if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
	{
	  emit_move_insn (scratch, sum2);
	  sum2 = scratch;
	}

      /* According to the way these invalid addresses are generated
         in reload.c, it should never happen (at least on s390) that
         *neither* of the PLUS components, after find_replacements
         was applied, is an address register.  */
      if (sum1 == scratch && sum2 == scratch)
	{
	  debug_rtx (src);
	  abort ();
	}

      src = gen_rtx_PLUS (Pmode, sum1, sum2);
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  s390_load_address (target, src);
}
1682 
1683 
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns 0 if ADDR is not a valid memory address, nonzero
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   OUT->pointer is set when the address is provably a pointer value
   (consumed by legitimate_la_operand_p for 31-bit LA legality).

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (addr, out)
     register rtx addr;
     struct s390_address *out;
{
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  int pointer = FALSE;

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
	{
	  if (code1 == REG || code1 == UNSPEC)
	    {
	      indx = op0;	/* index + base */
	      base = op1;
	    }

	  else
	    {
	      base = op0;	/* base + displacement */
	      disp = op1;
	    }
	}

      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);	/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}

      else
	{
	  return FALSE;
	}
    }

  else
    disp = addr;		/* displacement */


  /* Prefer to use pointer as base, not index.  */
  if (base && indx)
    {
      int base_ptr = GET_CODE (base) == UNSPEC
		     || (REG_P (base) && REG_POINTER (base));
      int indx_ptr = GET_CODE (indx) == UNSPEC
		     || (REG_P (indx) && REG_POINTER (indx));

      if (!base_ptr && indx_ptr)
	{
	  rtx tmp = base;
	  base = indx;
	  indx = tmp;
	}
    }

  /* Validate base register.  */
  if (base)
    {
      /* (unspec [reg] 101) wraps the literal-pool base register,
	 as generated by legitimize_pic_address.  */
      if (GET_CODE (base) == UNSPEC)
        {
          if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
	      return FALSE;
	  base = XVECEXP (base, 0, 0);
	  pointer = TRUE;
	}

      if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
	  return FALSE;

      /* These registers are known to hold pointers.  */
      if (REGNO (base) == BASE_REGISTER
	  || REGNO (base) == STACK_POINTER_REGNUM
	  || REGNO (base) == FRAME_POINTER_REGNUM
	  || ((reload_completed || reload_in_progress)
	      && frame_pointer_needed
	      && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
	  || REGNO (base) == ARG_POINTER_REGNUM
	  || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
	      && REGNO (base) <= LAST_VIRTUAL_REGISTER)
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = TRUE;
    }

  /* Validate index register.  Same rules as for the base.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        {
          if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
	      return FALSE;
	  indx = XVECEXP (indx, 0, 0);
	  pointer = TRUE;
	}

      if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
	  return FALSE;

      if (REGNO (indx) == BASE_REGISTER
	  || REGNO (indx) == STACK_POINTER_REGNUM
	  || REGNO (indx) == FRAME_POINTER_REGNUM
	  || ((reload_completed || reload_in_progress)
	      && frame_pointer_needed
	      && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
	  || REGNO (indx) == ARG_POINTER_REGNUM
	  || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
	      && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = TRUE;
    }

  /* Validate displacement.  */
  if (disp)
    {
      /* Allow integer constant in range.  */
      if (GET_CODE (disp) == CONST_INT)
        {
	  /* If the argument pointer is involved, the displacement will change
	     later anyway as the argument pointer gets eliminated.  This could
	     make a valid displacement invalid, but it is more likely to make
	     an invalid displacement valid, because we sometimes access the
	     register save area via negative offsets to the arg pointer.
	     Thus we don't check the displacement for validity here.  If after
	     elimination the displacement turns out to be invalid after all,
	     this is fixed up by reload in any case.  */
	  if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
	    {
	      /* Hardware displacements are 12-bit unsigned.  */
	      if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
	        return FALSE;
	    }
        }

      /* In the small-PIC case, the linker converts @GOT12
         and @GOTNTPOFF offsets to possible displacements.
	 (110 is the @GOT12 UNSPEC; see legitimize_pic_address.)  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == UNSPEC
               && (XINT (XEXP (disp, 0), 1) == 110
		   || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
        {
          if (flag_pic != 1)
            return FALSE;

	  pointer = TRUE;
        }

      /* Accept chunkfied literal pool symbol references.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == MINUS
               && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
        {
	  pointer = TRUE;
        }

      /* Likewise if a constant offset is present.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
               && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
               && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
               && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
        {
	  pointer = TRUE;
        }

      /* We can convert literal pool addresses to
         displacements by basing them off the base register.  */
      else
        {
          /* In some cases, we can accept an additional
             small constant offset.  Split these off here.  */

          unsigned int offset = 0;

          if (GET_CODE (disp) == CONST
              && GET_CODE (XEXP (disp, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
            {
              offset = INTVAL (XEXP (XEXP (disp, 0), 1));
              disp = XEXP (XEXP (disp, 0), 0);
            }

          /* Now we must have a literal pool address.  */
          if (GET_CODE (disp) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (disp))
            return FALSE;

          /* If we have an offset, make sure it does not
             exceed the size of the constant pool entry.  */
          if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
            return FALSE;

          /* Either base or index must be free to
             hold the base register.  */
          if (base && indx)
            return FALSE;

          /* Convert the address.  Wrap the pool symbol in
	     (unspec ... 100), the literal-pool-relative marker.  */
          if (base)
            indx = gen_rtx_REG (Pmode, BASE_REGISTER);
          else
            base = gen_rtx_REG (Pmode, BASE_REGISTER);

          disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
          disp = gen_rtx_CONST (Pmode, disp);

          if (offset)
            disp = plus_constant (disp, offset);

	  pointer = TRUE;
        }
    }

  /* A pure displacement (no registers) is trivially a pointer.  */
  if (!base && !indx)
    pointer = TRUE;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = disp;
      out->pointer = pointer;
    }

  return TRUE;
}
1936 
1937 /* Return nonzero if ADDR is a valid memory address.
1938    STRICT specifies whether strict register checking applies.  */
1939 
1940 int
legitimate_address_p(mode,addr,strict)1941 legitimate_address_p (mode, addr, strict)
1942      enum machine_mode mode ATTRIBUTE_UNUSED;
1943      register rtx addr;
1944      int strict;
1945 {
1946   struct s390_address ad;
1947   if (!s390_decompose_address (addr, &ad))
1948     return FALSE;
1949 
1950   if (strict)
1951     {
1952       if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1953 	return FALSE;
1954       if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
1955 	return FALSE;
1956     }
1957   else
1958     {
1959       if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
1960 	return FALSE;
1961       if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
1962 	return FALSE;
1963     }
1964 
1965   return TRUE;
1966 }
1967 
1968 /* Return 1 if OP is a valid operand for the LA instruction.
1969    In 31-bit, we need to prove that the result is used as an
1970    address, as LA performs only a 31-bit addition.  */
1971 
1972 int
legitimate_la_operand_p(op)1973 legitimate_la_operand_p (op)
1974      register rtx op;
1975 {
1976   struct s390_address addr;
1977   if (!s390_decompose_address (op, &addr))
1978     return FALSE;
1979 
1980   if (TARGET_64BIT || addr.pointer)
1981     return TRUE;
1982 
1983   return FALSE;
1984 }
1985 
1986 /* Return 1 if OP is a valid operand for the LA instruction,
1987    and we prefer to use LA over addition to compute it.  */
1988 
1989 int
preferred_la_operand_p(op)1990 preferred_la_operand_p (op)
1991      register rtx op;
1992 {
1993   struct s390_address addr;
1994   if (!s390_decompose_address (op, &addr))
1995     return FALSE;
1996 
1997   if (!TARGET_64BIT && !addr.pointer)
1998     return FALSE;
1999 
2000   if (addr.pointer)
2001     return TRUE;
2002 
2003   if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2004       || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2005     return TRUE;
2006 
2007   return FALSE;
2008 }
2009 
2010 /* Emit a forced load-address operation to load SRC into DST.
2011    This will use the LOAD ADDRESS instruction even in situations
2012    where legitimate_la_operand_p (SRC) returns false.  */
2013 
2014 void
s390_load_address(dst,src)2015 s390_load_address (dst, src)
2016      rtx dst;
2017      rtx src;
2018 {
2019   if (TARGET_64BIT)
2020     emit_move_insn (dst, src);
2021   else
2022     emit_insn (gen_force_la_31 (dst, src));
2023 }
2024 
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      returned.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_REF_FLAG set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
   reg also appears in the address.

   NOTE(review): the bare UNSPEC numbers used below appear to mean
   (inferred from the comments in this function -- confirm against
   the unspec definitions in s390.md):
     100  literal-pool-relative symbol address
     101  literal pool base address (in BASE_REGISTER)
     110  @GOT12   (GOT offset < 4k)
     111  @GOTENT  (PC-relative GOT entry, 64-bit only)
     112  @GOT     (GOT offset loaded from literal pool)
     113  @PLT
     114  literal-pool-relative @PLT (31-bit)  */

rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;
  rtx base;

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF
	  && (SYMBOL_REF_FLAG (addr)
              || CONSTANT_POOL_ADDRESS_P (addr))))
    {
      /* This is a local symbol.  */
      if (TARGET_64BIT && larl_operand (addr, VOIDmode))
        {
          /* Access local symbols PC-relative via LARL.
             This is the same as in the non-PIC case, so it is
             handled automatically ...  */
        }
      else
        {
          /* Access local symbols relative to the literal pool.  */

          rtx temp = reg? reg : gen_reg_rtx (Pmode);

          /* Put the pool-relative offset (UNSPEC 100) into the
             constant pool and load it into TEMP.  */
          addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 100);
          addr = gen_rtx_CONST (Pmode, addr);
          addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

          /* Add the literal pool base (UNSPEC 101 of BASE_REGISTER).  */
          base = gen_rtx_REG (Pmode, BASE_REGISTER);
          base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
          new = gen_rtx_PLUS (Pmode, base, temp);

          if (reg != 0)
            {
              emit_move_insn (reg, new);
              new = reg;
            }
        }
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      /* Global symbol: the address must come from the GOT.  */
      if (reg == 0)
        reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
        {
          /* Assume GOT offset < 4k.  This is handled the same way
             in both 31- and 64-bit code (@GOT12).  */

	  if (reload_in_progress || reload_completed)
	    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
          new = gen_rtx_CONST (Pmode, new);
          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else if (TARGET_64BIT)
        {
          /* If the GOT offset might be >= 4k, we determine the position
             of the GOT entry via a PC-relative LARL (@GOTENT).  */

          rtx temp = gen_reg_rtx (Pmode);

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
          new = gen_rtx_CONST (Pmode, new);
          emit_move_insn (temp, new);

          /* Then load the GOT entry itself.  */
          new = gen_rtx_MEM (Pmode, temp);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else
        {
          /* If the GOT offset might be >= 4k, we have to load it
             from the literal pool (@GOT).  */

          rtx temp = gen_reg_rtx (Pmode);

	  if (reload_in_progress || reload_completed)
	    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

          addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 112);
          addr = gen_rtx_CONST (Pmode, addr);
          addr = force_const_mem (Pmode, addr);
          emit_move_insn (temp, addr);

          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
    }
  else
    {
      /* ADDR is neither a plain symbol nor a label: handle CONST
         wrappers, UNSPECs that escaped the literal pool, and sums.  */
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) == UNSPEC)
	    {
	      if (XVECLEN (addr, 0) != 1)
                abort ();
              switch (XINT (addr, 1))
                {
                  /* If someone moved an @GOT or lt-relative UNSPEC
                     out of the literal pool, force them back in.  */
                  case 100:
                  case 112:
                  case 114:
                    new = force_const_mem (Pmode, orig);
                    break;

                  /* @GOTENT is OK as is.  */
                  case 111:
                    break;

                  /* @PLT is OK as is on 64-bit, must be converted to
                     lt-relative PLT on 31-bit.  */
                  case 113:
                    if (!TARGET_64BIT)
                      {
                        rtx temp = reg? reg : gen_reg_rtx (Pmode);

                        /* Rewrap the symbol as UNSPEC 114 and access it
                           via the literal pool, as for local symbols.  */
                        addr = XVECEXP (addr, 0, 0);
                        addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 114);
                        addr = gen_rtx_CONST (Pmode, addr);
                        addr = force_const_mem (Pmode, addr);
	                emit_move_insn (temp, addr);

                        base = gen_rtx_REG (Pmode, BASE_REGISTER);
                        base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
                        new = gen_rtx_PLUS (Pmode, base, temp);

                        if (reg != 0)
                          {
                            emit_move_insn (reg, new);
                            new = reg;
                          }
                      }
                    break;

                  /* Everything else cannot happen.  */
                  default:
                    abort ();
                }
	    }
	  else if (GET_CODE (addr) != PLUS)
	    abort ();
	}
      if (GET_CODE (addr) == PLUS)
	{
	  rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
	  /* Check first to see if this is a constant offset
             from a local symbol reference.  */
	  if ((GET_CODE (op0) == LABEL_REF
		|| (GET_CODE (op0) == SYMBOL_REF
		    && (SYMBOL_REF_FLAG (op0)
                        || CONSTANT_POOL_ADDRESS_P (op0))))
	      && GET_CODE (op1) == CONST_INT)
	    {
              if (TARGET_64BIT && larl_operand (op0, VOIDmode))
                {
                  if (INTVAL (op1) & 1)
                    {
                      /* LARL can't handle odd offsets, so emit a
                         pair of LARL and LA.  */
                      rtx temp = reg? reg : gen_reg_rtx (Pmode);

                      if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
                        {
                          /* The odd offset does not fit LA's unsigned
                             12-bit displacement; fold all but 1 of it
                             into the LARL operand.  */
                          int even = INTVAL (op1) - 1;
                          op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
			  op0 = gen_rtx_CONST (Pmode, op0);
                          op1 = GEN_INT (1);
                        }

                      emit_move_insn (temp, op0);
                      new = gen_rtx_PLUS (Pmode, temp, op1);

                      if (reg != 0)
                        {
                          emit_move_insn (reg, new);
                          new = reg;
                        }
                    }
                  else
                    {
                      /* If the offset is even, we can just use LARL.
                         This will happen automatically.  */
                    }
                }
              else
                {
                  /* Access local symbols relative to the literal pool.  */

                  rtx temp = reg? reg : gen_reg_rtx (Pmode);

                  /* The pool constant is UNSPEC 100 of the symbol plus
                     the integer offset.  */
                  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0), 100);
                  addr = gen_rtx_PLUS (Pmode, addr, op1);
                  addr = gen_rtx_CONST (Pmode, addr);
                  addr = force_const_mem (Pmode, addr);
        	  emit_move_insn (temp, addr);

                  base = gen_rtx_REG (Pmode, BASE_REGISTER);
                  base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
                  new = gen_rtx_PLUS (Pmode, base, temp);

                  if (reg != 0)
                    {
                      emit_move_insn (reg, new);
                      new = reg;
                    }
                }
	    }

          /* Now, check whether it is an LT-relative symbol plus offset
             that was pulled out of the literal pool.  Force it back in.  */

	  else if (GET_CODE (op0) == UNSPEC
	           && GET_CODE (op1) == CONST_INT
	           && XINT (op0, 1) == 100)
            {
	      if (XVECLEN (op0, 0) != 1)
                abort ();

              new = force_const_mem (Pmode, orig);
            }

          /* Otherwise, compute the sum.  */
	  else
	    {
	      /* Legitimize each operand separately; reuse REG for the
	         second only if the first did not already claim it.  */
	      base = legitimize_pic_address (XEXP (addr, 0), reg);
	      new  = legitimize_pic_address (XEXP (addr, 1),
					     base == reg ? NULL_RTX : reg);
	      if (GET_CODE (new) == CONST_INT)
		new = plus_constant (base, INTVAL (new));
	      else
		{
		  /* Re-associate so any constant ends up outermost.  */
		  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
		    {
		      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
		      new = XEXP (new, 1);
		    }
		  new = gen_rtx_PLUS (Pmode, base, new);
		}

	      if (GET_CODE (new) == CONST)
		new = XEXP (new, 0);
              new = force_operand (new, 0);
	    }
	}
    }
  return new;
}
2306 
2307 /* Load the thread pointer into a register.  */
2308 
2309 static rtx
get_thread_pointer()2310 get_thread_pointer ()
2311 {
2312   rtx tp;
2313 
2314   tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
2315   tp = force_reg (Pmode, tp);
2316   mark_reg_pointer (tp, BITS_PER_WORD);
2317 
2318   return tp;
2319 }
2320 
2321 /* Construct the SYMBOL_REF for the tls_get_offset function.  */
2322 
2323 static GTY(()) rtx s390_tls_symbol;
2324 rtx
s390_tls_get_offset()2325 s390_tls_get_offset ()
2326 {
2327   if (!s390_tls_symbol)
2328     s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
2329 
2330   return s390_tls_symbol;
2331 }
2332 
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  REG may be used as temporary.
   Returns the (possibly new) rtx holding the address; dispatches on
   the TLS access model of the symbol.  */

static rtx
legitimize_tls_address (addr, reg)
     rtx addr;
     rtx reg;
{
  rtx new, tls_call, temp, base, r2, insn;

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (tls_symbolic_operand (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
	/* Call __tls_get_offset with the @TLSGD operand in r2; the
	   call sequence is wrapped as a libcall block equivalent to
	   the @NTPOFF value of the symbol.  */
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
	new = gen_rtx_CONST (Pmode, tls_call);
	new = force_const_mem (Pmode, new);
	emit_move_insn (r2, new);
	emit_call_insn (gen_call_value_tls (r2, tls_call));
	insn = get_insns ();
	end_sequence ();

	new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new);

	/* Final address is thread pointer + offset.  */
	new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new);
	    new = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_DYNAMIC:
	/* One __tls_get_offset call yields the module base
	   (@TLSLDM); each symbol then adds its @DTPOFF offset.  */
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
	new = gen_rtx_CONST (Pmode, tls_call);
	new = force_const_mem (Pmode, new);
	emit_move_insn (r2, new);
	emit_call_insn (gen_call_value_tls (r2, tls_call));
	insn = get_insns ();
	end_sequence ();

	new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new);

	/* BASE = thread pointer + module base.  */
	new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
	base = gen_reg_rtx (Pmode);
	s390_load_address (base, new);

	/* Load the symbol's @DTPOFF offset from the literal pool.  */
	new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
	new = gen_rtx_CONST (Pmode, new);
	new = force_const_mem (Pmode, new);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new);

	new = gen_rtx_PLUS (Pmode, base, temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new);
	    new = reg;
	  }
	break;

      case TLS_MODEL_INITIAL_EXEC:
	/* The @NTPOFF value lives in a GOT slot; how we reach that
	   slot depends on flag_pic and the target word size.  */
	if (flag_pic == 1)
	  {
	    /* Assume GOT offset < 4k.  This is handled the same way
	       in both 31- and 64-bit code.  */

	    if (reload_in_progress || reload_completed)
	      regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
	    new = gen_rtx_MEM (Pmode, new);
	    RTX_UNCHANGING_P (new) = 1;
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);
	  }
	else if (TARGET_64BIT)
	  {
	    /* If the GOT offset might be >= 4k, we determine the position
	       of the GOT entry via a PC-relative LARL.  */

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);

	    new = gen_rtx_MEM (Pmode, temp);
	    RTX_UNCHANGING_P (new) = 1;
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);
	  }
	else if (flag_pic)
	  {
	    /* If the GOT offset might be >= 4k, we have to load it
	       from the literal pool.  */

	    if (reload_in_progress || reload_completed)
	      regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    new = force_const_mem (Pmode, new);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);

            new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	    new = gen_rtx_MEM (Pmode, new);
	    RTX_UNCHANGING_P (new) = 1;

	    /* UNSPEC_TLS_LOAD keeps the symbol visible so the load
	       can be annotated for linker relaxation.  */
	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new));
	  }
	else
	  {
	    /* In position-dependent code, load the absolute address of
	       the GOT entry from the literal pool.  */

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    new = force_const_mem (Pmode, new);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);

	    new = temp;
	    new = gen_rtx_MEM (Pmode, new);
	    RTX_UNCHANGING_P (new) = 1;

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new));
	  }

	/* Common tail: add the thread pointer to the @NTPOFF value.  */
	new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new);
	    new = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_EXEC:
	/* The @NTPOFF offset is a link-time constant; load it from
	   the literal pool and add the thread pointer.  */
	new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	new = gen_rtx_CONST (Pmode, new);
	new = force_const_mem (Pmode, new);
        temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new);

	new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new);
	    new = reg;
	  }
	break;

      default:
	abort ();
      }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
    {
      /* A TLS unspec that already went through legitimization once.  */
      switch (XINT (XEXP (addr, 0), 1))
	{
	case UNSPEC_INDNTPOFF:
	  /* Only valid as-is on 64-bit, where LARL can reach it.  */
	  if (TARGET_64BIT)
	    new = addr;
	  else
	    abort ();
	  break;

	default:
	  abort ();
	}
    }

  else
    abort ();  /* for now ... */

  return new;
}
2524 
2525 /* Emit insns to move operands[1] into operands[0].  */
2526 
2527 void
emit_symbolic_move(operands)2528 emit_symbolic_move (operands)
2529      rtx *operands;
2530 {
2531   rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2532 
2533   if (GET_CODE (operands[0]) == MEM)
2534     operands[1] = force_reg (Pmode, operands[1]);
2535   else if (TLS_SYMBOLIC_CONST (operands[1]))
2536     operands[1] = legitimize_tls_address (operands[1], temp);
2537   else if (flag_pic)
2538     operands[1] = legitimize_pic_address (operands[1], temp);
2539 }
2540 
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the operand pointed to by X.

   When -fpic is used, special handling is needed for symbolic references.
   See comments by legitimize_pic_address for details.  */

rtx
legitimize_address (x, oldx, mode)
     register rtx x;
     register rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  rtx constant_term = const0_rtx;

  /* TLS and PIC symbols get their own legitimization; if the result
     is already a valid address we are done.  */
  if (TLS_SYMBOLIC_CONST (x))
    {
      x = legitimize_tls_address (x, 0);

      if (legitimate_address_p (mode, x, FALSE))
	return x;
    }
  else if (flag_pic)
    {
      if (SYMBOLIC_CONST (x)
          || (GET_CODE (x) == PLUS
              && (SYMBOLIC_CONST (XEXP (x, 0))
                  || SYMBOLIC_CONST (XEXP (x, 1)))))
	  x = legitimize_pic_address (x, 0);

      if (legitimate_address_p (mode, x, FALSE))
	return x;
    }

  /* Strip off any constant addend so the base can be handled alone.  */
  x = eliminate_constant_term (x, &constant_term);

  /* Optimize loading of large displacements by splitting them
     into the multiple of 4K and the rest; this allows the
     former to be CSE'd if possible.

     Don't do this if the displacement is added to a register
     pointing into the stack frame, as the offsets will
     change later anyway.  */

  if (GET_CODE (constant_term) == CONST_INT
      && (INTVAL (constant_term) < 0
          || INTVAL (constant_term) >= 4096)
      && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
    {
      /* LOWER is the in-range 12-bit part; UPPER the 4K-aligned rest
         (XOR with LOWER clears exactly the low 12 bits).  */
      HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;

      rtx temp = gen_reg_rtx (Pmode);
      rtx val  = force_operand (GEN_INT (upper), temp);
      if (val != temp)
	emit_move_insn (temp, val);

      x = gen_rtx_PLUS (Pmode, x, temp);
      constant_term = GEN_INT (lower);
    }

  /* A sum must have both operands in registers; force whichever side
     is not yet a register.  */
  if (GET_CODE (x) == PLUS)
    {
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val  = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  register rtx temp = gen_reg_rtx (Pmode);
	  register rtx val  = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
	}
    }

  /* Re-attach the (now in-range) constant part.  */
  if (constant_term != const0_rtx)
    x = gen_rtx_PLUS (Pmode, x, constant_term);

  return x;
}
2634 
/* Emit code to move LEN bytes from DST to SRC.
   NOTE(review): per the argument order the copy is SRC -> DST; the
   old comment has the operands backwards.

   Three strategies:
     - constant LEN <= 256: a single MVC (length encoded as LEN - 1);
     - TARGET_MVCLE: one MVCLE instruction;
     - otherwise: a loop of 256-byte MVC blocks plus a final
       variable-length MVC via EXECUTE.  */

void
s390_expand_movstr (dst, src, len)
     rtx dst;
     rtx src;
     rtx len;
{
  rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
    TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
  rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
    TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;


  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      /* A zero-length move needs no code at all.  */
      if (INTVAL (len) > 0)
        emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
    }

  else if (TARGET_MVCLE)
    {
      /* MVCLE takes two even/odd register pairs: address in the high
         part, length in the low part.  */
      enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
      enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
      rtx reg0 = gen_reg_rtx (double_mode);
      rtx reg1 = gen_reg_rtx (double_mode);

      /* Clobber first so the partial sets below don't read
         uninitialized pseudos.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, reg0));
      emit_insn (gen_rtx_CLOBBER (VOIDmode, reg1));

      emit_move_insn (gen_highpart (single_mode, reg0),
		      force_operand (XEXP (dst, 0), NULL_RTX));
      emit_move_insn (gen_highpart (single_mode, reg1),
		      force_operand (XEXP (src, 0), NULL_RTX));

      convert_move (gen_lowpart (single_mode, reg0), len, 1);
      convert_move (gen_lowpart (single_mode, reg1), len, 1);

      emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
    }

  else
    {
      /* General case: loop over 256-byte blocks, then move the
         remaining COUNT % 256 bytes.  */
      rtx dst_addr, src_addr, count, blocks, temp;
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;
      tree type;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
        mode = word_mode;

      type = (*lang_hooks.types.type_for_mode) (mode, 1);
      if (!type)
        abort ();

      dst_addr = gen_reg_rtx (Pmode);
      src_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      /* Nothing to do for LEN == 0.  */
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);
      src = change_address (src, VOIDmode, src_addr);

      /* COUNT - 1: the MVC length operand encodes length minus one,
         and BLOCKS = (COUNT - 1) >> 8 is the number of full blocks.  */
      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
      if (temp != count)
        emit_move_insn (count, temp);

      temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_start_loop (1);
      expand_exit_loop_top_cond (0, build (NE_EXPR, type,
					   make_tree (type, blocks),
					   make_tree (type, const0_rtx)));

      /* Copy one full 256-byte block and advance both addresses.  */
      emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
      s390_load_address (dst_addr,
			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
      s390_load_address (src_addr,
			 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_end_loop ();

      /* Final partial block: COUNT now holds (LEN - 1) % 256 in its
         low byte, as required by the EXECUTE-based pattern.  */
      emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
      emit_label (end_label);
    }
}
2734 
2735 /* Emit code to clear LEN bytes at DST.  */
2736 
2737 void
s390_expand_clrstr(dst,len)2738 s390_expand_clrstr (dst, len)
2739      rtx dst;
2740      rtx len;
2741 {
2742   rtx (*gen_short) PARAMS ((rtx, rtx)) =
2743     TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2744   rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
2745     TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
2746 
2747 
2748   if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2749     {
2750       if (INTVAL (len) > 0)
2751         emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
2752     }
2753 
2754   else if (TARGET_MVCLE)
2755     {
2756       enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2757       enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2758       rtx reg0 = gen_reg_rtx (double_mode);
2759       rtx reg1 = gen_reg_rtx (double_mode);
2760 
2761       emit_insn (gen_rtx_CLOBBER (VOIDmode, reg0));
2762       emit_insn (gen_rtx_CLOBBER (VOIDmode, reg1));
2763 
2764       emit_move_insn (gen_highpart (single_mode, reg0),
2765 		      force_operand (XEXP (dst, 0), NULL_RTX));
2766       convert_move (gen_lowpart (single_mode, reg0), len, 1);
2767 
2768       emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2769       emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2770 
2771       emit_insn ((*gen_long) (reg0, reg1, reg0));
2772     }
2773 
2774   else
2775     {
2776       rtx dst_addr, src_addr, count, blocks, temp;
2777       rtx end_label = gen_label_rtx ();
2778       enum machine_mode mode;
2779       tree type;
2780 
2781       mode = GET_MODE (len);
2782       if (mode == VOIDmode)
2783         mode = word_mode;
2784 
2785       type = (*lang_hooks.types.type_for_mode) (mode, 1);
2786       if (!type)
2787         abort ();
2788 
2789       dst_addr = gen_reg_rtx (Pmode);
2790       src_addr = gen_reg_rtx (Pmode);
2791       count = gen_reg_rtx (mode);
2792       blocks = gen_reg_rtx (mode);
2793 
2794       convert_move (count, len, 1);
2795       emit_cmp_and_jump_insns (count, const0_rtx,
2796 			       EQ, NULL_RTX, mode, 1, end_label);
2797 
2798       emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2799       dst = change_address (dst, VOIDmode, dst_addr);
2800 
2801       temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2802       if (temp != count)
2803         emit_move_insn (count, temp);
2804 
2805       temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2806       if (temp != blocks)
2807         emit_move_insn (blocks, temp);
2808 
2809       expand_start_loop (1);
2810       expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2811 					   make_tree (type, blocks),
2812 					   make_tree (type, const0_rtx)));
2813 
2814       emit_insn ((*gen_short) (dst, GEN_INT (255)));
2815       s390_load_address (dst_addr,
2816 			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2817 
2818       temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2819       if (temp != blocks)
2820         emit_move_insn (blocks, temp);
2821 
2822       expand_end_loop ();
2823 
2824       emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
2825       emit_label (end_label);
2826     }
2827 }
2828 
/* Emit code to compare LEN bytes at OP0 with those at OP1,
   and return the result in TARGET (negative/zero/positive in the
   usual memcmp sense, produced by the cmpint pattern from the
   condition code).  */

void
s390_expand_cmpmem (target, op0, op1, len)
     rtx target;
     rtx op0;
     rtx op1;
     rtx len;
{
  rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
    TARGET_64BIT ? gen_cmpmem_short_64 : gen_cmpmem_short_31;
  rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
    TARGET_64BIT ? gen_cmpmem_long_64 : gen_cmpmem_long_31;
  rtx (*gen_result) PARAMS ((rtx)) =
    GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;

  /* Resolve any queued autoincrements before emitting compares.  */
  op0 = protect_from_queue (op0, 0);
  op1 = protect_from_queue (op1, 0);
  len = protect_from_queue (len, 0);

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      /* Constant length up to 256: one CLC (length encoded as
         LEN - 1); zero-length compare is always "equal".  */
      if (INTVAL (len) > 0)
        {
          emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
          emit_insn ((*gen_result) (target));
        }
      else
        emit_move_insn (target, const0_rtx);
    }

  else /* if (TARGET_MVCLE) */
    {
      /* CLCLE takes two even/odd register pairs: address in the high
         part, length in the low part.  */
      enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
      enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
      rtx reg0 = gen_reg_rtx (double_mode);
      rtx reg1 = gen_reg_rtx (double_mode);

      /* Clobber first so the partial sets below don't read
         uninitialized pseudos.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, reg0));
      emit_insn (gen_rtx_CLOBBER (VOIDmode, reg1));

      emit_move_insn (gen_highpart (single_mode, reg0),
		      force_operand (XEXP (op0, 0), NULL_RTX));
      emit_move_insn (gen_highpart (single_mode, reg1),
		      force_operand (XEXP (op1, 0), NULL_RTX));

      convert_move (gen_lowpart (single_mode, reg0), len, 1);
      convert_move (gen_lowpart (single_mode, reg1), len, 1);

      emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
      emit_insn ((*gen_result) (target));
    }

#if 0
  /* Deactivate for now as profile code cannot cope with
     CC being live across basic block boundaries.  */
  else
    {
      /* Block-wise CLC loop with an early exit on the first
         mismatching 256-byte block.  */
      rtx addr0, addr1, count, blocks, temp;
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;
      tree type;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
        mode = word_mode;

      type = (*lang_hooks.types.type_for_mode) (mode, 1);
      if (!type)
        abort ();

      addr0 = gen_reg_rtx (Pmode);
      addr1 = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
      emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
      op0 = change_address (op0, VOIDmode, addr0);
      op1 = change_address (op1, VOIDmode, addr1);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
      if (temp != count)
        emit_move_insn (count, temp);

      temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_start_loop (1);
      expand_exit_loop_top_cond (0, build (NE_EXPR, type,
					   make_tree (type, blocks),
					   make_tree (type, const0_rtx)));

      emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
      /* Jump out as soon as a block differs (CC != 0).  */
      temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
      temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
			gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
      temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
      emit_jump_insn (temp);

      s390_load_address (addr0,
			 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
      s390_load_address (addr1,
			 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_end_loop ();

      emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
      emit_label (end_label);

      emit_insn ((*gen_result) (target));
    }
#endif
}
2953 
2954 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2955    We need to emit DTP-relative relocations.  */
2956 
2957 void
s390_output_dwarf_dtprel(file,size,x)2958 s390_output_dwarf_dtprel (file, size, x)
2959      FILE *file;
2960      int size;
2961      rtx x;
2962 {
2963   switch (size)
2964     {
2965     case 4:
2966       fputs ("\t.long\t", file);
2967       break;
2968     case 8:
2969       fputs ("\t.quad\t", file);
2970       break;
2971     default:
2972       abort ();
2973     }
2974   output_addr_const (file, x);
2975   fputs ("@DTPOFF", file);
2976 }
2977 
2978 /* In the name of slightly smaller debug output, and to cater to
   general assembler lossage, recognize various UNSPEC sequences
2980    and turn them back into a direct symbol reference.  */
2981 
rtx
s390_simplify_dwarf_addr (orig_x)
     rtx orig_x;
{
  rtx x = orig_x, y;

  /* GOT-style accesses we know how to simplify are always MEMs.  */
  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);

  /* (mem (plus (reg PIC) (const (unspec [SYM] 110)))):
     a @GOT12 access (110 is the UNSPEC number emitted as @GOT12
     by s390_output_symbolic_const); return the bare SYM.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    {
      y = XEXP (XEXP (x, 1), 0);
      if (GET_CODE (y) == UNSPEC
	  && XINT (y, 1) == 110)
	return XVECEXP (y, 0, 0);
      return orig_x;
    }

  /* (mem (const (unspec [SYM] 111))): a @GOTENT access;
     likewise return the bare SYM.  */
  if (GET_CODE (x) == CONST)
    {
      y = XEXP (x, 0);
      if (GET_CODE (y) == UNSPEC
	  && XINT (y, 1) == 111)
	return XVECEXP (y, 0, 0);
      return orig_x;
    }

  return orig_x;
}
3015 
3016 /* Locate some local-dynamic symbol still in use by this function
3017    so that we can print its name in local-dynamic base patterns.  */
3018 
static const char *
get_some_local_dynamic_name ()
{
  rtx insn;

  /* Return the cached name if we already found one.  */
  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  /* Walk all insn patterns; get_some_local_dynamic_name_1 caches
     the name of the first local-dynamic TLS symbol it finds in
     cfun->machine->some_ld_name and returns nonzero.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  /* Callers only ask when such a symbol is known to exist.  */
  abort ();
}
3034 
/* for_each_rtx callback for get_some_local_dynamic_name.  If *PX
   (or a constant-pool entry it addresses) is a local-dynamic TLS
   symbol, record its name and return 1 to stop the walk;
   otherwise return 0 to continue.  */

static int
get_some_local_dynamic_name_1 (px, data)
     rtx *px;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *px;

  /* Look through references into the constant pool.  */
  if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
    {
      x = get_pool_constant (x);
      return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
    }

  if (GET_CODE (x) == SYMBOL_REF
      && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
    {
      cfun->machine->some_ld_name = XSTR (x, 0);
      return 1;
    }

  return 0;
}
3057 
3058 /* Output symbolic constant X in assembler syntax to
3059    stdio stream FILE.  */
3060 
void
s390_output_symbolic_const (file, x)
     FILE *file;
     rtx x;
{
  switch (GET_CODE (x))
    {
    case CONST:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* Strip wrappers; only the inner expression matters for
	 assembler output.  */
      s390_output_symbolic_const (file, XEXP (x, 0));
      break;

    case PLUS:
      s390_output_symbolic_const (file, XEXP (x, 0));
      fprintf (file, "+");
      s390_output_symbolic_const (file, XEXP (x, 1));
      break;

    case MINUS:
      s390_output_symbolic_const (file, XEXP (x, 0));
      fprintf (file, "-");
      s390_output_symbolic_const (file, XEXP (x, 1));
      break;

    case CONST_INT:
    case LABEL_REF:
    case CODE_LABEL:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    case UNSPEC:
      /* All UNSPECs handled here wrap exactly one operand.  */
      if (XVECLEN (x, 0) != 1)
        output_operand_lossage ("invalid UNSPEC as operand (1)");
      switch (XINT (x, 1))
        {
        case 100:	/* Operand relative to the literal pool base.  */
        case 104:
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "-");
	  s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
	  break;
        case 105:	/* Literal pool base relative to the operand.  */
	  s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
          fprintf (file, "-");
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  break;
	case 110:	/* 12-bit GOT offset.  */
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@GOT12");
	  break;
	case 111:	/* GOT entry, PC-relative.  */
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@GOTENT");
	  break;
	case 112:	/* Full GOT offset.  */
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@GOT");
	  break;
	case 113:	/* PLT entry.  */
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@PLT");
	  break;
	case 114:	/* PLT entry relative to the pool base.  */
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "@PLT-");
	  s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
	  break;
	case UNSPEC_TLSGD:
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@TLSGD");
	  break;
	case UNSPEC_TLSLDM:
	  /* Local-dynamic base: name any LD symbol of this function.  */
	  assemble_name (file, get_some_local_dynamic_name ());
	  fprintf (file, "@TLSLDM");
	  break;
	case UNSPEC_DTPOFF:
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@DTPOFF");
	  break;
	case UNSPEC_NTPOFF:
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@NTPOFF");
	  break;
	case UNSPEC_GOTNTPOFF:
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@GOTNTPOFF");
	  break;
	case UNSPEC_INDNTPOFF:
	  s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
	  fprintf (file, "@INDNTPOFF");
	  break;
	default:
	  output_operand_lossage ("invalid UNSPEC as operand (2)");
	  break;
        }
      break;

    default:
      fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
      break;
    }
}
3165 
3166 /* Output address operand ADDR in assembler syntax to
3167    stdio stream FILE.  */
3168 
void
print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  struct s390_address ad;

  /* The address must decompose into (strictly) valid base and
     index registers plus a displacement.  */
  if (!s390_decompose_address (addr, &ad)
      || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
      || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
    output_operand_lossage ("Cannot decompose address.");

  /* Emit "disp(indx,base)", "disp(base)", or bare "disp";
     a missing displacement is printed as 0.  */
  if (ad.disp)
    s390_output_symbolic_const (file, ad.disp);
  else
    fprintf (file, "0");

  if (ad.base && ad.indx)
    fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
                              reg_names[REGNO (ad.base)]);
  else if (ad.base)
    fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
}
3192 
3193 /* Output operand X in assembler syntax to stdio stream FILE.
3194    CODE specified the format flag.  The following format flags
3195    are recognized:
3196 
3197     'C': print opcode suffix for branch condition.
3198     'D': print opcode suffix for inverse branch condition.
3199     'J': print tls_load/tls_gdcall/tls_ldcall suffix
3200     'O': print only the displacement of a memory reference.
3201     'R': print only the base register of a memory reference.
3202     'N': print the second word of a DImode operand.
3203     'M': print the second word of a TImode operand.
3204 
3205     'b': print integer X as if it's an unsigned byte.
3206     'x': print integer X as if it's an unsigned word.
3207     'h': print integer X as if it's a signed word.  */
3208 
3209 void
print_operand(file,x,code)3210 print_operand (file, x, code)
3211      FILE *file;
3212      rtx x;
3213      int code;
3214 {
3215   switch (code)
3216     {
3217     case 'C':
3218       fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
3219       return;
3220 
3221     case 'D':
3222       fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
3223       return;
3224 
3225     case 'J':
3226       if (GET_CODE (x) == SYMBOL_REF)
3227 	{
3228 	  fprintf (file, "%s", ":tls_load:");
3229 	  output_addr_const (file, x);
3230 	}
3231       else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3232 	{
3233 	  fprintf (file, "%s", ":tls_gdcall:");
3234 	  output_addr_const (file, XVECEXP (x, 0, 0));
3235 	}
3236       else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3237 	{
3238 	  fprintf (file, "%s", ":tls_ldcall:");
3239 	  assemble_name (file, get_some_local_dynamic_name ());
3240 	}
3241       else
3242 	abort ();
3243       return;
3244 
3245     case 'O':
3246       {
3247         struct s390_address ad;
3248 
3249         if (GET_CODE (x) != MEM
3250             || !s390_decompose_address (XEXP (x, 0), &ad)
3251 	    || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3252             || ad.indx)
3253           abort ();
3254 
3255         if (ad.disp)
3256           s390_output_symbolic_const (file, ad.disp);
3257         else
3258           fprintf (file, "0");
3259       }
3260       return;
3261 
3262     case 'R':
3263       {
3264         struct s390_address ad;
3265 
3266         if (GET_CODE (x) != MEM
3267             || !s390_decompose_address (XEXP (x, 0), &ad)
3268 	    || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3269             || ad.indx)
3270           abort ();
3271 
3272         if (ad.base)
3273           fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3274         else
3275           fprintf (file, "0");
3276       }
3277       return;
3278 
3279     case 'N':
3280       if (GET_CODE (x) == REG)
3281 	x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3282       else if (GET_CODE (x) == MEM)
3283 	x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
3284       else
3285         abort ();
3286       break;
3287 
3288     case 'M':
3289       if (GET_CODE (x) == REG)
3290 	x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3291       else if (GET_CODE (x) == MEM)
3292 	x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
3293       else
3294         abort ();
3295       break;
3296     }
3297 
3298   switch (GET_CODE (x))
3299     {
3300     case REG:
3301       fprintf (file, "%s", reg_names[REGNO (x)]);
3302       break;
3303 
3304     case MEM:
3305       output_address (XEXP (x, 0));
3306       break;
3307 
3308     case CONST:
3309     case CODE_LABEL:
3310     case LABEL_REF:
3311     case SYMBOL_REF:
3312       s390_output_symbolic_const (file, x);
3313       break;
3314 
3315     case CONST_INT:
3316       if (code == 'b')
3317         fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3318       else if (code == 'x')
3319         fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3320       else if (code == 'h')
3321         fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
3322       else
3323         fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3324       break;
3325 
3326     case CONST_DOUBLE:
3327       if (GET_MODE (x) != VOIDmode)
3328         abort ();
3329       if (code == 'b')
3330         fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
3331       else if (code == 'x')
3332         fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
3333       else if (code == 'h')
3334         fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
3335       else
3336         abort ();
3337       break;
3338 
3339     default:
3340       fatal_insn ("UNKNOWN in print_operand !?", x);
3341       break;
3342     }
3343 }
3344 
3345 /* Target hook for assembling integer objects.  We need to define it
3346    here to work a round a bug in some versions of GAS, which couldn't
3347    handle values smaller than INT_MIN when printed in decimal.  */
3348 
static bool
s390_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
  /* Work around buggy GAS versions that could not parse decimal
     values below INT_MIN: emit such 8-byte constants in hex.  */
  if (size == 8 && aligned_p
      && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
    {
      fputs ("\t.quad\t", asm_out_file);
      fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      putc ('\n', asm_out_file);
      return true;
    }
  /* All other cases are handled by the generic implementation.  */
  return default_assemble_integer (x, size, aligned_p);
}
3365 
3366 
3367 #define DEBUG_SCHED 0
3368 
3369 /* Returns true if register REGNO is used  for forming
3370    a memory address in expression X.  */
3371 
3372 static int
reg_used_in_mem_p(regno,x)3373 reg_used_in_mem_p (regno, x)
3374      int regno;
3375      rtx x;
3376 {
3377   enum rtx_code code = GET_CODE (x);
3378   int i, j;
3379   const char *fmt;
3380 
3381   if (code == MEM)
3382     {
3383       if (refers_to_regno_p (regno, regno+1,
3384 			     XEXP (x, 0), 0))
3385 	return 1;
3386     }
3387   else if (code == SET
3388 	   && GET_CODE (SET_DEST (x)) == PC)
3389     {
3390       if (refers_to_regno_p (regno, regno+1,
3391 			     SET_SRC (x), 0))
3392 	return 1;
3393     }
3394 
3395   fmt = GET_RTX_FORMAT (code);
3396   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3397     {
3398       if (fmt[i] == 'e'
3399 	  && reg_used_in_mem_p (regno, XEXP (x, i)))
3400 	return 1;
3401 
3402       else if (fmt[i] == 'E')
3403 	for (j = 0; j < XVECLEN (x, i); j++)
3404 	  if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3405 	    return 1;
3406     }
3407   return 0;
3408 }
3409 
3410 /* Returns true if expression DEP_RTX sets an address register
3411    used by instruction INSN to address memory.  */
3412 
static int
addr_generation_dependency_p (dep_rtx, insn)
     rtx dep_rtx;
     rtx insn;
{
  rtx target, pat;

  if (GET_CODE (dep_rtx) == SET)
    {
      /* Strip STRICT_LOW_PART and SUBREGs to find the underlying
	 register DEP_RTX modifies.  */
      target = SET_DEST (dep_rtx);
      if (GET_CODE (target) == STRICT_LOW_PART)
	target = XEXP (target, 0);
      while (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) == REG)
	{
	  int regno = REGNO (target);

	  if (get_attr_type (insn) == TYPE_LA)
	    {
	      /* For an LA, only its address computation (the SET_SRC
		 of its single SET) can use the register.  An LA
		 pattern is either a plain SET or a two-element
		 PARALLEL whose first element is the SET.  */
	      pat = PATTERN (insn);
	      if (GET_CODE (pat) == PARALLEL)
		{
		  if (XVECLEN (pat, 0) != 2)
		    abort();
		  pat = XVECEXP (pat, 0, 0);
		}
	      if (GET_CODE (pat) == SET)
		return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
	      else
		abort();
	    }
	  else if (get_attr_atype (insn) == ATYPE_MEM)
	    /* Any other memory-type insn: check whether the register
	       is used anywhere to form an address.  */
	    return reg_used_in_mem_p (regno, PATTERN (insn));
	}
    }
  return 0;
}
3452 
3453 
3454 /* Return the modified cost of the dependency of instruction INSN
3455    on instruction DEP_INSN through the link LINK.  COST is the
3456    default cost of that dependency.
3457 
3458    Data dependencies are all handled without delay.  However, if a
3459    register is modified and subsequently used as base or index
3460    register of a memory reference, at least 4 cycles need to pass
3461    between setting and using the register to avoid pipeline stalls.
3462    An exception is the LA instruction. An address generated by LA can
3463    be used by introducing only a one cycle stall on the pipeline.  */
3464 
static int
s390_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn;
     int cost;
{
  rtx dep_rtx;
  int i;

  /* If the dependence is an anti-dependence, there is no cost.  For an
     output dependence, there is sometimes a cost, but it doesn't seem
     worth handling those few cases.  */

  if (REG_NOTE_KIND (link) != 0)
    return 0;

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
    return cost;

  dep_rtx = PATTERN (dep_insn);

  if (GET_CODE (dep_rtx) == SET)
    {
      /* DEP_INSN sets a register that INSN uses for addressing:
	 add the penalty — 1 cycle after an LA, 4 otherwise.  */
      if (addr_generation_dependency_p (dep_rtx, insn))
	{
	  cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
	  if (DEBUG_SCHED)
	    {
	      fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
		       cost);
	      debug_rtx (dep_insn);
	      debug_rtx (insn);
	    }
	}
    }
  else if (GET_CODE (dep_rtx) == PARALLEL)
    {
      /* Check each element of a PARALLEL the same way; the penalty
	 accumulates per offending sub-pattern.  */
      for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
	{
	  if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
					    insn))
	    {
	      cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
	      if (DEBUG_SCHED)
		{
		  fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
			   ,cost);
		  debug_rtx (dep_insn);
		  debug_rtx (insn);
		}
	    }
	}
    }

  return cost;
}
3523 
3524 
3525 /* A C statement (sans semicolon) to update the integer scheduling priority
3526    INSN_PRIORITY (INSN).  Reduce the priority to execute the INSN earlier,
3527    increase the priority to execute INSN later.  Do not define this macro if
3528    you do not need to adjust the scheduling priorities of insns.
3529 
3530    A LA instruction maybe scheduled later, since the pipeline bypasses the
3531    calculated value.  */
3532 
3533 static int
s390_adjust_priority(insn,priority)3534 s390_adjust_priority (insn, priority)
3535      rtx insn ATTRIBUTE_UNUSED;
3536      int priority;
3537 {
3538   if (! INSN_P (insn))
3539     return priority;
3540 
3541   if (GET_CODE (PATTERN (insn)) == USE
3542       || GET_CODE (PATTERN (insn)) == CLOBBER)
3543     return priority;
3544 
3545   switch (get_attr_type (insn))
3546     {
3547     default:
3548       break;
3549 
3550     case TYPE_LA:
3551       if (priority >= 0 && priority < 0x01000000)
3552 	priority <<= 3;
3553       break;
3554     case TYPE_LM:
3555       /* LM in epilogue should never be scheduled. This
3556 	 is due to literal access done in function body.
3557 	 The usage of register 13 is not mentioned explicitly,
3558 	 leading to scheduling 'LM' accross this instructions.
3559       */
3560       priority = 0x7fffffff;
3561       break;
3562     }
3563 
3564   return priority;
3565 }
3566 
3567 
3568 /* Split all branches that exceed the maximum distance.
3569    Returns true if this created a new literal pool entry.
3570 
3571    Code generated by this routine is allowed to use
3572    TEMP_REG as temporary scratch register.  If this is
3573    done, TEMP_USED is set to true.  */
3574 
static int
s390_split_branches (temp_reg, temp_used)
     rtx temp_reg;
     bool *temp_used;
{
  int new_literal = 0;
  rtx insn, pat, tmp, target;
  rtx *label;

  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Find all branches that exceed 64KB, and split them.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      /* Look at the branch SET proper, skipping extra elements
	 attached in a PARALLEL.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
	pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
	continue;

      /* Locate the label reference: either an unconditional jump
	 or one arm of a conditional IF_THEN_ELSE branch.  */
      if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
	{
	  label = &SET_SRC (pat);
	}
      else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
	{
	  if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
	    label = &XEXP (SET_SRC (pat), 1);
          else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
            label = &XEXP (SET_SRC (pat), 2);
	  else
	    continue;
        }
      else
	continue;

      /* Branches of this length are still in relative range.  */
      if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
	continue;

      /* Everything below clobbers TEMP_REG; tell the caller.  */
      *temp_used = 1;

      if (TARGET_64BIT)
	{
	  /* 64-bit: load the target address into TEMP_REG and
	     branch through the register.  */
	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
	  INSN_ADDRESSES_NEW (tmp, -1);

	  target = temp_reg;
	}
      else if (!flag_pic)
	{
	  /* 31-bit non-PIC: fetch the absolute target address from
	     a new literal pool entry.  */
	  new_literal = 1;
	  tmp = force_const_mem (Pmode, *label);
	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
	  INSN_ADDRESSES_NEW (tmp, -1);

	  target = temp_reg;
	}
      else
	{
	  /* 31-bit PIC: the pool entry holds the pool-relative
	     offset of the target (UNSPEC 104); add the base
	     register to obtain the absolute address.  */
	  new_literal = 1;
	  tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 104);
	  tmp = gen_rtx_CONST (SImode, tmp);
	  tmp = force_const_mem (SImode, tmp);
	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
	  INSN_ADDRESSES_NEW (tmp, -1);

	  target = gen_rtx_REG (Pmode, BASE_REGISTER);
	  target = gen_rtx_PLUS (Pmode, target, temp_reg);
	}

      /* Substitute the computed target for the label in the branch.  */
      if (!validate_change (insn, label, target, 0))
	abort ();
    }

  return new_literal;
}
3657 
3658 
3659 /* Find a literal pool symbol referenced in RTX X, and store
3660    it at REF.  Will abort if X contains references to more than
3661    one such pool symbol; multiple references to the same symbol
3662    are allowed, however.
3663 
3664    The rtx pointed to by REF must be initialized to NULL_RTX
3665    by the caller before calling this routine.  */
3666 
3667 static void
find_constant_pool_ref(x,ref)3668 find_constant_pool_ref (x, ref)
3669      rtx x;
3670      rtx *ref;
3671 {
3672   int i, j;
3673   const char *fmt;
3674 
3675   if (GET_CODE (x) == SYMBOL_REF
3676       && CONSTANT_POOL_ADDRESS_P (x))
3677     {
3678       if (*ref == NULL_RTX)
3679         *ref = x;
3680       else if (*ref != x)
3681         abort();
3682     }
3683 
3684   fmt = GET_RTX_FORMAT (GET_CODE (x));
3685   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3686     {
3687       if (fmt[i] == 'e')
3688         {
3689           find_constant_pool_ref (XEXP (x, i), ref);
3690         }
3691       else if (fmt[i] == 'E')
3692         {
3693           for (j = 0; j < XVECLEN (x, i); j++)
3694             find_constant_pool_ref (XVECEXP (x, i, j), ref);
3695         }
3696     }
3697 }
3698 
3699 /* Replace every reference to the literal pool symbol REF
3700    in X by the address ADDR.  Fix up MEMs as required.  */
3701 
static void
replace_constant_pool_ref (x, ref, addr)
     rtx *x;
     rtx ref;
     rtx addr;
{
  int i, j;
  const char *fmt;

  /* A bare top-level REF is invalid; it must occur in one of the
     contexts handled below.  */
  if (*x == ref)
    abort ();

  /* Literal pool references can only occur inside a MEM ...  */
  if (GET_CODE (*x) == MEM)
    {
      rtx memref = XEXP (*x, 0);

      /* (mem REF) --> (mem ADDR)  */
      if (memref == ref)
	{
	  *x = replace_equiv_address (*x, addr);
	  return;
	}

      /* (mem (const (plus REF OFF))) --> (mem (ADDR + OFF))  */
      if (GET_CODE (memref) == CONST
	  && GET_CODE (XEXP (memref, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
	  && XEXP (XEXP (memref, 0), 0) == ref)
	{
	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
	  *x = replace_equiv_address (*x, plus_constant (addr, off));
	  return;
	}
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (*x) == SET)
    {
      rtx addrref = SET_SRC (*x);

      /* (set DEST REF) --> (set DEST ADDR)  */
      if (addrref == ref)
	{
	  SET_SRC (*x) = addr;
	  return;
	}

      /* (set DEST (const (plus REF OFF))) --> (set DEST (ADDR + OFF))  */
      if (GET_CODE (addrref) == CONST
	  && GET_CODE (XEXP (addrref, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
	  && XEXP (XEXP (addrref, 0), 0) == ref)
	{
	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
	  SET_SRC (*x) = plus_constant (addr, off);
	  return;
	}
    }

  /* Recurse into all sub-expressions.  */
  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
        }
    }
}
3772 
3773 /* Check whether ADDR is an address that uses the base register,
3774    without actually constituting a literal pool access.  (This happens
3775    in 31-bit PIC mode, where the base register is used as anchor for
3776    relative addressing of local symbols.)
3777 
3778    Returns 1 if the base register occupies the base slot,
3779    returns 2 if the base register occupies the index slot,
3780    returns 0 if the address is not of this form.  */
3781 
3782 static int
find_base_register_in_addr(addr)3783 find_base_register_in_addr (addr)
3784      struct s390_address *addr;
3785 {
3786   /* If DISP is complex, we might have a literal pool reference.  */
3787   if (addr->disp && GET_CODE (addr->disp) != CONST_INT)
3788     return 0;
3789 
3790   if (addr->base && REG_P (addr->base) && REGNO (addr->base) == BASE_REGISTER)
3791     return 1;
3792 
3793   if (addr->indx && REG_P (addr->indx) && REGNO (addr->indx) == BASE_REGISTER)
3794     return 2;
3795 
3796   return 0;
3797 }
3798 
3799 /* Return true if X contains an address that uses the base register,
3800    without actually constituting a literal pool access.  */
3801 
static bool
find_base_register_ref (x)
     rtx x;
{
  bool retv = FALSE;
  struct s390_address addr;
  int i, j;
  const char *fmt;

  /* Addresses can only occur inside a MEM ...  */
  if (GET_CODE (x) == MEM)
    {
      if (s390_decompose_address (XEXP (x, 0), &addr)
	  && find_base_register_in_addr (&addr))
	return TRUE;
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (x) == SET && GET_CODE (SET_DEST (x)) == REG)
    {
      if (s390_decompose_address (SET_SRC (x), &addr)
	  && find_base_register_in_addr (&addr))
	return TRUE;
    }

  /* Recurse into sub-expressions, accumulating any hits.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          retv |= find_base_register_ref (XEXP (x, i));
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            retv |= find_base_register_ref (XVECEXP (x, i, j));
        }
    }

  return retv;
}
3843 
3844 /* If X contains an address that uses the base register,
3845    without actually constituting a literal pool access,
3846    replace the base register with REPL in all such cases.
3847 
3848    Handles both MEMs and load address patterns.  */
3849 
static void
replace_base_register_ref (x, repl)
     rtx *x;
     rtx repl;
{
  struct s390_address addr;
  rtx new_addr;
  int i, j, pos;
  const char *fmt;

  /* Addresses can only occur inside a MEM ...  */
  if (GET_CODE (*x) == MEM)
    {
      if (s390_decompose_address (XEXP (*x, 0), &addr)
	  && (pos = find_base_register_in_addr (&addr)))
	{
	  /* POS tells whether the base register occupies the base
	     (1) or index (2) slot; substitute REPL there.  */
	  if (pos == 1)
	    addr.base = repl;
	  else
	    addr.indx = repl;

	  /* Rebuild the address as base [+ indx] [+ disp].  */
	  new_addr = addr.base;
	  if (addr.indx)
	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
	  if (addr.disp)
	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);

	  *x = replace_equiv_address (*x, new_addr);
	  return;
	}
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (*x) == SET && GET_CODE (SET_DEST (*x)) == REG)
    {
      if (s390_decompose_address (SET_SRC (*x), &addr)
	  && (pos = find_base_register_in_addr (&addr)))
	{
	  if (pos == 1)
	    addr.base = repl;
	  else
	    addr.indx = repl;

	  new_addr = addr.base;
	  if (addr.indx)
	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
	  if (addr.disp)
	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);

	  SET_SRC (*x) = new_addr;
	  return;
	}
    }

  /* Recurse into all sub-expressions.  */
  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          replace_base_register_ref (&XEXP (*x, i), repl);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            replace_base_register_ref (&XVECEXP (*x, i, j), repl);
        }
    }
}
3918 
3919 
/* We keep a list of constants which we have to add to internal
   constant tables in the middle of large functions.  */
3922 
/* Number of entries in CONSTANT_MODES below.  */
#define NR_C_MODES 6

/* Machine modes of constants placed in the literal pool,
   listed from widest to narrowest.  */
enum machine_mode constant_modes[NR_C_MODES] =
{
  DFmode, DImode,
  SFmode, SImode,
  HImode,
  QImode
};

/* Generator emitting a consttable entry for each mode,
   indexed parallel to CONSTANT_MODES.  */
rtx (*gen_consttable[NR_C_MODES])(rtx) =
{
  gen_consttable_df, gen_consttable_di,
  gen_consttable_sf, gen_consttable_si,
  gen_consttable_hi,
  gen_consttable_qi
};

/* One constant stored in a literal pool chunk.  */
struct constant
{
  struct constant *next;	/* Next constant of the same mode.  */
  rtx value;			/* The constant itself.  */
  rtx label;			/* Label addressing it in the pool.  */
};

/* One literal pool chunk covering a range of insns.  */
struct constant_pool
{
  struct constant_pool *next;	/* Next chunk in the chain.  */
  rtx first_insn;		/* First insn covered by this chunk.  */
  rtx pool_insn;		/* Placeholder insn for the pool body.  */
  bitmap insns;			/* UIDs of the insns covered.  */

  struct constant *constants[NR_C_MODES];  /* Per-mode constant lists.  */
  rtx label;			/* Label marking the pool base.  */
  int size;			/* Size of the pool body in bytes.  */
  bool anchor;			/* Whether a base-register anchor entry
				   is needed (see s390_add_anchor).  */
};
3959 
3960 static struct constant_pool * s390_chunkify_start PARAMS ((rtx, bool *));
3961 static void s390_chunkify_finish PARAMS ((struct constant_pool *, rtx));
3962 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
3963 
3964 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
3965 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
3966 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
3967 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
3968 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3969 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3970 static void s390_add_anchor PARAMS ((struct constant_pool *));
3971 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
3972 static void s390_free_pool PARAMS ((struct constant_pool *));
3973 
3974 /* Create new constant pool covering instructions starting at INSN
3975    and chain it to the end of POOL_LIST.  */
3976 
3977 static struct constant_pool *
s390_start_pool(pool_list,insn)3978 s390_start_pool (pool_list, insn)
3979      struct constant_pool **pool_list;
3980      rtx insn;
3981 {
3982   struct constant_pool *pool, **prev;
3983   int i;
3984 
3985   pool = (struct constant_pool *) xmalloc (sizeof *pool);
3986   pool->next = NULL;
3987   for (i = 0; i < NR_C_MODES; i++)
3988     pool->constants[i] = NULL;
3989 
3990   pool->label = gen_label_rtx ();
3991   pool->first_insn = insn;
3992   pool->pool_insn = NULL_RTX;
3993   pool->insns = BITMAP_XMALLOC ();
3994   pool->size = 0;
3995   pool->anchor = FALSE;
3996 
3997   for (prev = pool_list; *prev; prev = &(*prev)->next)
3998     ;
3999   *prev = pool;
4000 
4001   return pool;
4002 }
4003 
/* End range of instructions covered by POOL at INSN and emit
   placeholder insn representing the pool.  If INSN is NULL_RTX,
   the pool is ended after the last insn of the function.  */

static void
s390_end_pool (pool, insn)
     struct constant_pool *pool;
     rtx insn;
{
  /* The placeholder carries the pool size (plus 8 bytes of slack
     for alignment) so insn length computation accounts for it.  */
  rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);

  if (!insn)
    insn = get_last_insn ();

  pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
  INSN_ADDRESSES_NEW (pool->pool_insn, -1);
}
4020 
/* Add INSN to the list of insns covered by POOL.
   Insns are tracked by UID in the pool's bitmap.  */

static void
s390_add_pool_insn (pool, insn)
     struct constant_pool *pool;
     rtx insn;
{
  bitmap_set_bit (pool->insns, INSN_UID (insn));
}
4030 
4031 /* Return pool out of POOL_LIST that covers INSN.  */
4032 
4033 static struct constant_pool *
s390_find_pool(pool_list,insn)4034 s390_find_pool (pool_list, insn)
4035      struct constant_pool *pool_list;
4036      rtx insn;
4037 {
4038   struct constant_pool *pool;
4039 
4040   for (pool = pool_list; pool; pool = pool->next)
4041     if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4042       break;
4043 
4044   return pool;
4045 }
4046 
4047 /* Add constant VAL of mode MODE to the constant pool POOL.  */
4048 
4049 static void
s390_add_constant(pool,val,mode)4050 s390_add_constant (pool, val, mode)
4051      struct constant_pool *pool;
4052      rtx val;
4053      enum machine_mode mode;
4054 {
4055   struct constant *c;
4056   int i;
4057 
4058   for (i = 0; i < NR_C_MODES; i++)
4059     if (constant_modes[i] == mode)
4060       break;
4061   if (i == NR_C_MODES)
4062     abort ();
4063 
4064   for (c = pool->constants[i]; c != NULL; c = c->next)
4065     if (rtx_equal_p (val, c->value))
4066       break;
4067 
4068   if (c == NULL)
4069     {
4070       c = (struct constant *) xmalloc (sizeof *c);
4071       c->value = val;
4072       c->label = gen_label_rtx ();
4073       c->next = pool->constants[i];
4074       pool->constants[i] = c;
4075       pool->size += GET_MODE_SIZE (mode);
4076     }
4077 }
4078 
4079 /* Find constant VAL of mode MODE in the constant pool POOL.
4080    Return an RTX describing the distance from the start of
4081    the pool to the location of the new constant.  */
4082 
4083 static rtx
s390_find_constant(pool,val,mode)4084 s390_find_constant (pool, val, mode)
4085      struct constant_pool *pool;
4086      rtx val;
4087      enum machine_mode mode;
4088 {
4089   struct constant *c;
4090   rtx offset;
4091   int i;
4092 
4093   for (i = 0; i < NR_C_MODES; i++)
4094     if (constant_modes[i] == mode)
4095       break;
4096   if (i == NR_C_MODES)
4097     abort ();
4098 
4099   for (c = pool->constants[i]; c != NULL; c = c->next)
4100     if (rtx_equal_p (val, c->value))
4101       break;
4102 
4103   if (c == NULL)
4104     abort ();
4105 
4106   offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4107                                  gen_rtx_LABEL_REF (Pmode, pool->label));
4108   offset = gen_rtx_CONST (Pmode, offset);
4109   return offset;
4110 }
4111 
4112 /* Set 'anchor' flag in POOL.  */
4113 
4114 static void
s390_add_anchor(pool)4115 s390_add_anchor (pool)
4116      struct constant_pool *pool;
4117 {
4118   if (!pool->anchor)
4119     {
4120       pool->anchor = TRUE;
4121       pool->size += 4;
4122     }
4123 }
4124 
/* Dump out the constants in POOL: replace POOL's placeholder insn
   with the actual pool contents (section switch, pool label, optional
   anchor, all constants, section switch back, barrier).  Return the
   last insn emitted.  */

static rtx
s390_dump_pool (pool)
     struct constant_pool *pool;
{
  struct constant *c;
  rtx insn;
  int i;

  /* Pool start insn switches to proper section
     and guarantees necessary alignment.  */
  if (TARGET_64BIT)
    insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
  else
    insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* The pool label marks the base all pool offsets are relative to.  */
  insn = emit_label_after (pool->label, insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Emit anchor if we need one.  The anchor is a 4-byte slot holding
     a 105-unspec'd reference to the pool label itself.  */
  if (pool->anchor)
    {
      rtx anchor = gen_rtx_LABEL_REF (VOIDmode, pool->label);
      anchor = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, anchor), 105);
      anchor = gen_rtx_CONST (VOIDmode, anchor);
      insn = emit_insn_after (gen_consttable_si (anchor), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
	/* Convert 104 unspecs to pool-relative references.  */
	rtx value = c->value;
	if (GET_CODE (value) == CONST
	    && GET_CODE (XEXP (value, 0)) == UNSPEC
	    && XINT (XEXP (value, 0), 1) == 104
	    && XVECLEN (XEXP (value, 0), 0) == 1)
	  {
	    value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
	    			   gen_rtx_LABEL_REF (VOIDmode, pool->label));
	    value = gen_rtx_CONST (VOIDmode, value);
	  }

	/* Each constant gets its own label, followed by the
	   mode-specific consttable insn carrying the value.  */
	insn = emit_label_after (c->label, insn);
	INSN_ADDRESSES_NEW (insn, -1);
	insn = emit_insn_after (gen_consttable[i] (value), insn);
	INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Pool end insn switches back to previous section
     and guarantees necessary alignment.  */
  if (TARGET_64BIT)
    insn = emit_insn_after (gen_pool_end_64 (), insn);
  else
    insn = emit_insn_after (gen_pool_end_31 (), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn.  */
  remove_insn (pool->pool_insn);

  return insn;
}
4195 
4196 /* Free all memory used by POOL.  */
4197 
4198 static void
s390_free_pool(pool)4199 s390_free_pool (pool)
4200      struct constant_pool *pool;
4201 {
4202   int i;
4203 
4204   for (i = 0; i < NR_C_MODES; i++)
4205     {
4206       struct constant *c = pool->constants[i];
4207       while (c != NULL)
4208 	{
4209 	  struct constant *next = c->next;
4210 	  free (c);
4211 	  c = next;
4212 	}
4213     }
4214 
4215   BITMAP_XFREE (pool->insns);
4216   free (pool);
4217 }
4218 
4219 
/* Chunkify the literal pool if required.  Returns the list of pool
   chunks created, or NULL if the pool is small enough (below
   S390_POOL_CHUNK_MAX) to be left alone.

   Code generated by this routine is allowed to use
   TEMP_REG as temporary scratch register.  If this is
   done, TEMP_USED is set to true.  */

#define S390_POOL_CHUNK_MIN	0xc00
#define S390_POOL_CHUNK_MAX	0xe00

static struct constant_pool *
s390_chunkify_start (temp_reg, temp_used)
     rtx temp_reg;
     bool *temp_used;
{
  rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);

  struct constant_pool *curr_pool = NULL, *pool_list = NULL;
  int extra_size = 0;
  bitmap far_labels;
  rtx insn;

  /* Base-register reload pattern differs between the 64-bit and
     31-bit targets; pick the right generator once up front.  */
  rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
    TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;


  /* Do we need to chunkify the literal pool?  */

  if (get_pool_size () < S390_POOL_CHUNK_MAX)
    return NULL;

  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Scan all insns and move literals to pool chunks.
     Also, emit anchor reload insns before every insn that uses
     the literal pool base register as anchor pointer.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx pool_ref = NULL_RTX;
	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
	  if (pool_ref)
	    {
	      /* Open a new chunk at the first pool-using insn.  */
	      if (!curr_pool)
		curr_pool = s390_start_pool (&pool_list, insn);

	      s390_add_constant (curr_pool, get_pool_constant (pool_ref),
					    get_pool_mode (pool_ref));
	      s390_add_pool_insn (curr_pool, insn);
	    }

	  else if (!TARGET_64BIT && flag_pic
                   && find_base_register_ref (PATTERN (insn)))
	    {
	      /* 31-bit PIC: reload the anchor pointer into TEMP_REG
		 in front of the insn that uses the base register.  */
	      rtx new = gen_reload_anchor (temp_reg, base_reg);
	      new = emit_insn_before (new, insn);
	      INSN_ADDRESSES_NEW (new, INSN_ADDRESSES (INSN_UID (insn)));
	      extra_size += 8;
	      *temp_used = 1;

	      if (!curr_pool)
		curr_pool = s390_start_pool (&pool_list, new);

	      s390_add_anchor (curr_pool);
	      s390_add_pool_insn (curr_pool, insn);
	    }
	}

      /* Jumps and labels are tracked too, so far-label analysis
	 below can tell which chunk a label belongs to.  */
      if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
	if (curr_pool)
	  s390_add_pool_insn (curr_pool, insn);

      /* Without an open chunk or a known insn address there is
	 nothing to decide at this point.  */
      if (!curr_pool
	  || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
          || INSN_ADDRESSES (INSN_UID (insn)) == -1)
	continue;

      if (TARGET_64BIT)
	{
	  /* 64-bit: only the chunk's own size limits it.  */
	  if (curr_pool->size < S390_POOL_CHUNK_MAX)
	    continue;

	  s390_end_pool (curr_pool, NULL_RTX);
	  curr_pool = NULL;
	}
      else
	{
	  /* 31-bit: the covered code range is limited as well.  */
          int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
		 	   - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
			 + extra_size;

	  /* We will later have to insert base register reload insns.
	     Those will have an effect on code size, which we need to
	     consider here.  This calculation makes rather pessimistic
	     worst-case assumptions.  */
	  if (GET_CODE (insn) == CODE_LABEL)
	    extra_size += 6;

	  if (chunk_size < S390_POOL_CHUNK_MIN
	      && curr_pool->size < S390_POOL_CHUNK_MIN)
	    continue;

	  /* Pool chunks can only be inserted after BARRIERs ...  */
	  if (GET_CODE (insn) == BARRIER)
	    {
	      s390_end_pool (curr_pool, insn);
	      curr_pool = NULL;
	      extra_size = 0;
	    }

	  /* ... so if we don't find one in time, create one.  */
          else if ((chunk_size > S390_POOL_CHUNK_MAX
	           || curr_pool->size > S390_POOL_CHUNK_MAX))
	    {
              rtx label, jump, barrier;

	      /* We can insert the barrier only after a 'real' insn.  */
	      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
		continue;
	      if (get_attr_length (insn) == 0)
		continue;

	      /* Don't separate insns created by s390_split_branches.  */
	      if (GET_CODE (insn) == INSN
		  && GET_CODE (PATTERN (insn)) == SET
		  && rtx_equal_p (SET_DEST (PATTERN (insn)), temp_reg))
		continue;

	      /* Emit a jump around the spot where the pool will go,
		 then a barrier so the pool can legally be placed.  */
 	      label = gen_label_rtx ();
	      jump = emit_jump_insn_after (gen_jump (label), insn);
	      barrier = emit_barrier_after (jump);
	      insn = emit_label_after (label, barrier);
	      JUMP_LABEL (jump) = label;
	      LABEL_NUSES (label) = 1;

	      INSN_ADDRESSES_NEW (jump, -1);
	      INSN_ADDRESSES_NEW (barrier, -1);
	      INSN_ADDRESSES_NEW (insn, -1);

	      s390_end_pool (curr_pool, barrier);
	      curr_pool = NULL;
	      extra_size = 0;
	    }
	}
    }

  /* Close the last chunk, if still open, at function end.  */
  if (curr_pool)
    s390_end_pool (curr_pool, NULL_RTX);


  /* Find all labels that are branched into
     from an insn belonging to a different chunk.  */

  far_labels = BITMAP_XMALLOC ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Labels marked with LABEL_PRESERVE_P can be target
	 of non-local jumps, so we have to mark them.
	 The same holds for named labels.

	 Don't do that, however, if it is the label before
	 a jump table.  */

      if (GET_CODE (insn) == CODE_LABEL
	  && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
	{
	  rtx vec_insn = next_real_insn (insn);
	  rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
			PATTERN (vec_insn) : NULL_RTX;
	  if (!vec_pat
	      || !(GET_CODE (vec_pat) == ADDR_VEC
		   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
	    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
	}

      /* If we have a direct jump (conditional or unconditional)
	 or a casesi jump, check all potential targets.  */
      else if (GET_CODE (insn) == JUMP_INSN)
	{
          rtx pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
	    pat = XVECEXP (pat, 0, 0);

          if (GET_CODE (pat) == SET)
            {
	      /* Direct jump: mark its target if it lies in a
		 different chunk than the jump itself.  */
	      rtx label = JUMP_LABEL (insn);
	      if (label)
		{
	          if (s390_find_pool (pool_list, label)
		      != s390_find_pool (pool_list, insn))
		    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
		}
            }
	  else if (GET_CODE (pat) == PARALLEL
		   && XVECLEN (pat, 0) == 2
		   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
		   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
		   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
	    {
	      /* Find the jump table used by this casesi jump.  */
	      rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
	      rtx vec_insn = next_real_insn (vec_label);
	      rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
			    PATTERN (vec_insn) : NULL_RTX;
	      if (vec_pat
		  && (GET_CODE (vec_pat) == ADDR_VEC
		      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
		{
		  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;

		  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
		    {
		      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);

		      if (s390_find_pool (pool_list, label)
			  != s390_find_pool (pool_list, insn))
			bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
		    }
		}
	    }
        }
    }

  /* Insert base register reload insns before every pool.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
      rtx insn = curr_pool->first_insn;
      INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
    }

  /* Insert base register reload insns at every far label.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
        && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
      {
	struct constant_pool *pool = s390_find_pool (pool_list, insn);
	if (pool)
	  {
	    rtx new_insn = gen_reload_base (base_reg, pool->label);
	    INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
	  }
      }


  BITMAP_XFREE (far_labels);


  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());

  return pool_list;
}
4481 
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   After we have decided to use this list, finish implementing
   all changes to the current function as required: rewrite every
   literal pool reference to a base+offset address, dump out the
   chunk contents, and free the list.

   Code generated by this routine is allowed to use
   TEMP_REG as temporary scratch register.  */

static void
s390_chunkify_finish (pool_list, temp_reg)
     struct constant_pool *pool_list;
     rtx temp_reg;
{
  rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
  struct constant_pool *curr_pool = NULL;
  rtx insn;


  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      curr_pool = s390_find_pool (pool_list, insn);
      if (!curr_pool)
	continue;

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
	      /* Address the constant as base register plus its
		 offset within the insn's own chunk.  */
              addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
                                                    get_pool_mode (pool_ref));
              addr = gen_rtx_PLUS (Pmode, base_reg, addr);
              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
	      /* Force re-recognition of the modified insn.  */
              INSN_CODE (insn) = -1;
            }

	  else if (!TARGET_64BIT && flag_pic
                   && find_base_register_ref (PATTERN (insn)))
	    {
	      /* The anchor value was reloaded into TEMP_REG by
		 s390_chunkify_start; redirect the use there.  */
	      replace_base_register_ref (&PATTERN (insn), temp_reg);
	    }
        }
    }

  /* Dump out all literal pools.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    s390_dump_pool (curr_pool);

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
4542 
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   We have decided we cannot use this list, so revert all changes
   to the current function that were done by s390_chunkify_start:
   remove pool placeholders (and any jump/barrier/label inserted
   around them), remove base/anchor reload insns, free the list.  */

static void
s390_chunkify_cancel (pool_list)
     struct constant_pool *pool_list;
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Remove all pool placeholder insns.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      /* Did we insert an extra barrier?  Remove it.  */
      rtx barrier = PREV_INSN (curr_pool->pool_insn);
      rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
      rtx label = NEXT_INSN (curr_pool->pool_insn);

      /* Only remove the jump/barrier/label triple if it matches
	 exactly what s390_chunkify_start emitted: a jump over the
	 pool to the label immediately following it.  */
      if (jump && GET_CODE (jump) == JUMP_INSN
	  && barrier && GET_CODE (barrier) == BARRIER
	  && label && GET_CODE (label) == CODE_LABEL
	  && GET_CODE (PATTERN (jump)) == SET
	  && SET_DEST (PATTERN (jump)) == pc_rtx
	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
	  && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
	{
	  remove_insn (jump);
	  remove_insn (barrier);
	  remove_insn (label);
	}

      remove_insn (curr_pool->pool_insn);
    }

  /* Remove all base/anchor register reload insns.
     These are recognized by their unspec numbers (210/211).  */

  for (insn = get_insns (); insn; )
    {
      rtx next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
	  && (XINT (SET_SRC (PATTERN (insn)), 1) == 210
	      || XINT (SET_SRC (PATTERN (insn)), 1) == 211))
	remove_insn (insn);

      insn = next_insn;
    }

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
4604 
4605 
/* Index of constant pool chunk that is currently being processed.
   Set to -1 before function output has started.  */
int s390_pool_count = -1;

/* Number of elements of current constant pool.  */
int s390_nr_constants;
4612 
/* Output main constant pool to stdio stream FILE, bracketed by
   START_LABEL and (on 31-bit only) END_LABEL.  On 64-bit the pool
   goes to the read-only data section; on 31-bit it is emitted
   in-line in the current section.  */

void
s390_output_constant_pool (start_label, end_label)
     rtx start_label;
     rtx end_label;
{
  if (TARGET_64BIT)
    {
      /* 64-bit: switch to rodata and align to 2**3 = 8 bytes
	 before the pool's start label.  */
      readonly_data_section ();
      ASM_OUTPUT_ALIGN (asm_out_file, 3);
      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
                                 CODE_LABEL_NUMBER (start_label));
    }
  else
    {
      /* 31-bit: the start label comes first, then 2**2 = 4 byte
	 alignment for the pool contents.  */
      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
                                 CODE_LABEL_NUMBER (start_label));
      ASM_OUTPUT_ALIGN (asm_out_file, 2);
    }

  /* s390_pool_count >= 0 signals to other output code that pool
     output is in progress.  */
  s390_pool_count = 0;
  output_constant_pool (current_function_name, current_function_decl);
  s390_pool_count = -1;
  if (TARGET_64BIT)
    function_section (current_function_decl);
  else
    {
      ASM_OUTPUT_ALIGN (asm_out_file, 1);
      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (end_label));
    }
}
4645 
/* Rework the prolog/epilog to avoid saving/restoring
   registers unnecessarily.  If TEMP_REGNO is nonnegative,
   it specifies the number of a caller-saved register used
   as temporary scratch register by code emitted during
   machine dependent reorg.

   Works by recomputing which GPRs actually need saving, then
   replacing the store-multiple/load-multiple insns emitted by the
   original prolog/epilog with ones covering only that range.  */

static void
s390_optimize_prolog (temp_regno)
     int temp_regno;
{
  int save_first, save_last, restore_first, restore_last;
  int i, j;
  rtx insn, new_insn, next_insn;

  /* Recompute regs_ever_live data for special registers.  */
  regs_ever_live[BASE_REGISTER] = 0;
  regs_ever_live[RETURN_REGNUM] = 0;
  regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;

  /* If there is (possibly) any pool entry, we need to
     load the base register.
     ??? FIXME: this should be more precise.  */
  if (get_pool_size ())
    regs_ever_live[BASE_REGISTER] = 1;

  /* In non-leaf functions, the prolog/epilog code relies
     on RETURN_REGNUM being saved in any case.  */
  if (!current_function_is_leaf)
    regs_ever_live[RETURN_REGNUM] = 1;

  /* We need to save/restore the temporary register.  */
  if (temp_regno >= 0)
    regs_ever_live[temp_regno] = 1;


  /* Find first and last gpr to be saved.  Candidates are gprs 6-15;
     global registers are excluded unless they are one of the special
     registers that must be handled regardless.  */

  for (i = 6; i < 16; i++)
    if (regs_ever_live[i])
      if (!global_regs[i]
	  || i == STACK_POINTER_REGNUM
          || i == RETURN_REGNUM
          || i == BASE_REGISTER
          || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
	break;

  for (j = 15; j > i; j--)
    if (regs_ever_live[j])
      if (!global_regs[j]
	  || j == STACK_POINTER_REGNUM
          || j == RETURN_REGNUM
          || j == BASE_REGISTER
          || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
	break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      save_first = restore_first = -1;
      save_last = restore_last = -1;
    }
  else
    {
      /* Save/restore from i to j.  */
      save_first = restore_first = i;
      save_last = restore_last = j;
    }

  /* Varargs functions need to save gprs 2 to 6.
     (Save range only; the restore range is unaffected.)  */
  if (current_function_stdarg)
    {
      save_first = 2;
      if (save_last < 6)
        save_last = 6;
    }


  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */
  if (i <= BASE_REGISTER && j >= BASE_REGISTER
      && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
    return;


  /* Search for prolog/epilog insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
	continue;
      if (GET_CODE (PATTERN (insn)) != PARALLEL)
	continue;

      /* A store-multiple in the save area: replace it with a
	 store covering only save_first..save_last.  */
      if (store_multiple_operation (PATTERN (insn), VOIDmode))
	{
	  set = XVECEXP (PATTERN (insn), 0, 0);
	  first = REGNO (SET_SRC (set));
	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
	  off = INTVAL (offset) - first * UNITS_PER_WORD;

	  /* Only touch stores that clearly are the prolog's save of
	     the special registers.  */
	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (first > BASE_REGISTER && first > RETURN_REGNUM)
	    continue;
	  if (last < BASE_REGISTER && last < RETURN_REGNUM)
	    continue;

	  if (save_first != -1)
	    {
	      new_insn = save_gprs (base, off, save_first, save_last);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	}

      /* A load-multiple from the save area: replace it with a
	 load covering only restore_first..restore_last.  */
      if (load_multiple_operation (PATTERN (insn), VOIDmode))
	{
	  set = XVECEXP (PATTERN (insn), 0, 0);
	  first = REGNO (SET_DEST (set));
	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
	  off = INTVAL (offset) - first * UNITS_PER_WORD;

	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (first > BASE_REGISTER && first > RETURN_REGNUM)
	    continue;
	  if (last < BASE_REGISTER && last < RETURN_REGNUM)
	    continue;

	  if (restore_first != -1)
	    {
	      new_insn = restore_gprs (base, off, restore_first, restore_last);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	}
    }
}
4797 
/* Check whether any insn in the function makes use of the original
   value of RETURN_REG (e.g. for __builtin_return_address).
   If so, insert an insn reloading that value from the stack save
   slot before each such use.

   Return true if any such insn was found.  */

static bool
s390_fixup_clobbered_return_reg (return_reg)
    rtx return_reg;
{
  bool replacement_done = 0;
  rtx insn;

  /* If we never called __builtin_return_address, register 14
     might have been used as temp during the prolog; we do
     not want to touch those uses.  */
  if (!has_hard_reg_initial_val (Pmode, REGNO (return_reg)))
    return false;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx reg, off, new_insn;

      if (GET_CODE (insn) != INSN)
	continue;
      if (!reg_referenced_p (return_reg, PATTERN (insn)))
	continue;
      /* The prolog's own store-multiple of the register is not a
	 use of the original value; skip it.  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL
	  && store_multiple_operation (PATTERN (insn), VOIDmode))
	continue;

      if (frame_pointer_needed)
	reg = hard_frame_pointer_rtx;
      else
	reg = stack_pointer_rtx;

      /* Offset of the register's save slot relative to REG.  */
      off = GEN_INT (cfun->machine->frame_size + REGNO (return_reg) * UNITS_PER_WORD);
      if (INTVAL (off) >= 4096)
	{
	  /* Offset exceeds the 12-bit displacement range; load it
	     from the literal pool into RETURN_REG and use that as
	     index instead.  */
	  off = force_const_mem (Pmode, off);
	  new_insn = gen_rtx_SET (Pmode, return_reg, off);
	  new_insn = emit_insn_before (new_insn, insn);
	  INSN_ADDRESSES_NEW (new_insn, -1);
	  off = return_reg;
	}

      /* Reload RETURN_REG from its save slot just before the use.  */
      new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
      new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
      new_insn = emit_insn_before (new_insn, insn);
      INSN_ADDRESSES_NEW (new_insn, -1);

      replacement_done = 1;
    }

  return replacement_done;
}
4854 
/* Perform machine-dependent processing: split the literal pool
   into chunks and split out-of-range branches, iterating until
   both constraints are met, then optimize the prolog/epilog.
   FIRST is unused.  */

void
s390_machine_dependent_reorg (first)
     rtx first ATTRIBUTE_UNUSED;
{
  bool fixed_up_clobbered_return_reg = 0;
  rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  bool temp_used = 0;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();


  /* There are two problematic situations we need to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool_list;

      /* Try to chunkify the literal pool.  */
      pool_list = s390_chunkify_start (temp_reg, &temp_used);

      /* Split out-of-range branches.  If this has created new
	 literal pool entries, cancel current chunk list and
	 recompute it.  */
      if (s390_split_branches (temp_reg, &temp_used))
        {
          if (pool_list)
            s390_chunkify_cancel (pool_list);

          continue;
        }

      /* Check whether we have clobbered a use of the return
	 register (e.g. for __builtin_return_address).  If so,
	 add insns reloading the register where necessary.  */
      if (temp_used && !fixed_up_clobbered_return_reg
	  && s390_fixup_clobbered_return_reg (temp_reg))
	{
	  fixed_up_clobbered_return_reg = 1;

	  /* The fixup insns might have caused a jump to overflow.  */
	  if (pool_list)
	    s390_chunkify_cancel (pool_list);

	  continue;
	}

      /* If we made it up to here, both conditions are satisfied.
	 Finish up pool chunkification if required.  */
      if (pool_list)
	s390_chunkify_finish (pool_list, temp_reg);

      break;
    }

  /* If the scratch register was used, it must be saved/restored.  */
  s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
}
4942 
4943 
4944 /* Return an RTL expression representing the value of the return address
4945    for the frame COUNT steps up from the current frame.  FRAME is the
4946    frame pointer of that frame.  */
4947 
4948 rtx
s390_return_addr_rtx(count,frame)4949 s390_return_addr_rtx (count, frame)
4950      int count;
4951      rtx frame;
4952 {
4953   rtx addr;
4954 
4955   /* For the current frame, we use the initial value of RETURN_REGNUM.
4956      This works both in leaf and non-leaf functions.  */
4957 
4958   if (count == 0)
4959     return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
4960 
4961   /* For frames farther back, we read the stack slot where the
4962      corresponding RETURN_REGNUM value was saved.  */
4963 
4964   addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
4965   addr = memory_address (Pmode, addr);
4966   return gen_rtx_MEM (Pmode, addr);
4967 }
4968 
4969 /* Find first call clobbered register unsused in a function.
4970    This could be used as base register in a leaf function
4971    or for holding the return address before epilogue.  */
4972 
4973 static int
find_unused_clobbered_reg()4974 find_unused_clobbered_reg ()
4975 {
4976   int i;
4977   for (i = 0; i < 6; i++)
4978     if (!regs_ever_live[i])
4979       return i;
4980   return 0;
4981 }
4982 
/* Fill cfun->machine with info about the stack frame of the current
   function: total frame size, whether call-saved FPRs need a save
   area, and the range of GPRs to save/restore.  */

static void
s390_frame_info ()
{
  char gprs_ever_live[16];
  int i, j;
  HOST_WIDE_INT fsize = get_frame_size ();

  /* Reject frames whose offsets the back end cannot encode.  */
  if (fsize > 0x7fff0000)
    fatal_error ("Total size of local variables exceeds architecture limit.");

  /* fprs 8 - 15 are call saved for 64 Bit ABI.  */
  cfun->machine->save_fprs_p = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
	{
          cfun->machine->save_fprs_p = 1;
	  break;
	}

  /* 8 bytes for each of the (up to) 8 saved fprs.  */
  cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;

  /* Does function need to setup frame and save area.  */

  if (! current_function_is_leaf
      || cfun->machine->frame_size > 0
      || current_function_calls_alloca
      || current_function_stdarg)
    cfun->machine->frame_size += STARTING_FRAME_OFFSET;

  /* Find first and last gpr to be saved.  Note that at this point,
     we assume the return register and the base register always
     need to be saved.  This is done because the usage of these
     register might change even after the prolog was emitted.
     If it turns out later that we really don't need them, the
     prolog/epilog code is modified again.  */

  for (i = 0; i < 16; i++)
    gprs_ever_live[i] = regs_ever_live[i] && !global_regs[i];

  if (flag_pic)
    gprs_ever_live[PIC_OFFSET_TABLE_REGNUM] =
    regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
  gprs_ever_live[BASE_REGISTER] = 1;
  gprs_ever_live[RETURN_REGNUM] = 1;
  gprs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;

  /* First gpr to save: lowest live register from 6 upward.  */
  for (i = 6; i < 16; i++)
    if (gprs_ever_live[i])
      break;

  /* Last gpr to save: highest live register above the first one.  */
  for (j = 15; j > i; j--)
    if (gprs_ever_live[j])
      break;


  /* Save / Restore from gpr i to j.  */
  cfun->machine->first_save_gpr = i;
  cfun->machine->first_restore_gpr = i;
  cfun->machine->last_save_gpr  = j;

  /* Varargs functions need to save gprs 2 to 6.  */
  if (current_function_stdarg)
    cfun->machine->first_save_gpr = 2;
}
5050 
5051 /* Return offset between argument pointer and frame pointer
5052    initially after prologue.  */
5053 
5054 int
s390_arg_frame_offset()5055 s390_arg_frame_offset ()
5056 {
5057   HOST_WIDE_INT fsize = get_frame_size ();
5058   int save_fprs_p, i;
5059 
5060   /* fprs 8 - 15 are caller saved for 64 Bit ABI.  */
5061   save_fprs_p = 0;
5062   if (TARGET_64BIT)
5063     for (i = 24; i < 32; i++)
5064       if (regs_ever_live[i] && !global_regs[i])
5065 	{
5066           save_fprs_p = 1;
5067 	  break;
5068 	}
5069 
5070   fsize = fsize + save_fprs_p * 64;
5071 
5072   /* Does function need to setup frame and save area.  */
5073 
5074   if (! current_function_is_leaf
5075       || fsize > 0
5076       || current_function_calls_alloca
5077       || current_function_stdarg)
5078     fsize += STARTING_FRAME_OFFSET;
5079   return fsize + STACK_POINTER_OFFSET;
5080 }
5081 
5082 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5083    to register BASE.  Return generated insn.  */
5084 
5085 static rtx
save_fpr(base,offset,regnum)5086 save_fpr (base, offset, regnum)
5087      rtx base;
5088      int offset;
5089      int regnum;
5090 {
5091   rtx addr;
5092   addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5093   set_mem_alias_set (addr, s390_sr_alias_set);
5094 
5095   return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
5096 }
5097 
5098 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5099    to register BASE.  Return generated insn.  */
5100 
5101 static rtx
restore_fpr(base,offset,regnum)5102 restore_fpr (base, offset, regnum)
5103      rtx base;
5104      int offset;
5105      int regnum;
5106 {
5107   rtx addr;
5108   addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5109   set_mem_alias_set (addr, s390_sr_alias_set);
5110 
5111   return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
5112 }
5113 
/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  Return the generated insn
   (not yet emitted).  */

static rtx
save_gprs (base, offset, first, last)
     rtx base;
     int offset;
     int first;
     int last;
{
  rtx addr, insn, note;
  int i;

  /* Address of the save slot for register FIRST.  */
  addr = plus_constant (base, offset + first * UNITS_PER_WORD);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, s390_sr_alias_set);

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
        insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
        insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }


  insn = gen_store_multiple (addr,
			     gen_rtx_REG (Pmode, first),
			     GEN_INT (last - first + 1));


  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply not set the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6)
    {
      rtx pat = PATTERN (insn);

      /* No varargs registers (2..5) are involved here, so every
	 SET of the PARALLEL may carry the flag directly.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      /* Build a second store-multiple covering only registers 6..LAST
	 and attach its pattern as the expression to use for DWARF.  */
      addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
				 gen_rtx_REG (Pmode, 6),
				 GEN_INT (last - 6 + 1));
      note = PATTERN (note);

      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   note, REG_NOTES (insn));

      for (i = 0; i < XVECLEN (note, 0); i++)
	if (GET_CODE (XVECEXP (note, 0, i)) == SET)
	  RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
5193 
5194 /* Generate insn to restore registers FIRST to LAST from
5195    the register save area located at offset OFFSET
5196    relative to register BASE.  */
5197 
5198 static rtx
restore_gprs(base,offset,first,last)5199 restore_gprs (base, offset, first, last)
5200      rtx base;
5201      int offset;
5202      int first;
5203      int last;
5204 {
5205   rtx addr, insn;
5206 
5207   addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5208   addr = gen_rtx_MEM (Pmode, addr);
5209   set_mem_alias_set (addr, s390_sr_alias_set);
5210 
5211   /* Special-case single register.  */
5212   if (first == last)
5213     {
5214       if (TARGET_64BIT)
5215         insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
5216       else
5217         insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
5218 
5219       return insn;
5220     }
5221 
5222   insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
5223 			    addr,
5224 			    GEN_INT (last - first + 1));
5225   return insn;
5226 }
5227 
/* Expand the prologue into a bunch of separate insns: save gprs,
   emit the literal pool and base register setup, save fprs, allocate
   the frame, set backchain/frame pointer, and load the GOT pointer.  */

void
s390_emit_prologue ()
{
  rtx insn, addr;
  rtx temp_reg;
  rtx pool_start_label, pool_end_label;
  int i;

  /* Compute frame_info.  */

  s390_frame_info ();

  /* Choose best register to use for temp use within prologue:
     RETURN_REGNUM when it is available, gpr 1 otherwise.  */

  if (!current_function_is_leaf
      && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
      && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */

  insn = save_gprs (stack_pointer_rtx, 0,
		    cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
  emit_insn (insn);

  /* Dump constant pool and set constant pool register.  */

  pool_start_label = gen_label_rtx();
  pool_end_label = gen_label_rtx();
  cfun->machine->literal_pool_label = pool_start_label;

  if (TARGET_64BIT)
    insn = emit_insn (gen_literal_pool_64 (gen_rtx_REG (Pmode, BASE_REGISTER),
					   pool_start_label, pool_end_label));
  else
    insn = emit_insn (gen_literal_pool_31 (gen_rtx_REG (Pmode, BASE_REGISTER),
					   pool_start_label, pool_end_label));

  /* Save fprs for variable args.  */

  if (current_function_stdarg)
    {
      /* Save fpr 0 and 2.  */

      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);

      if (TARGET_64BIT)
	{
	  /* Save fpr 4 and 6.  */

	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
	}
    }

  /* Save fprs 4 and 6 if used (31 bit ABI).  */

  if (!TARGET_64BIT)
    {
      /* Save fpr 4 and 6.  */
      if (regs_ever_live[18] && !global_regs[18])
	{
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (regs_ever_live[19] && !global_regs[19])
	{
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }

  /* Decrement stack pointer.  */

  if (cfun->machine->frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun->machine->frame_size);

      /* Save incoming stack pointer into temp reg.  */

      if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
	{
	  insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
	}

      /* Subtract frame size from stack pointer.  Frame sizes too large
	 for an immediate operand are loaded from the literal pool.  */

      frame_off = GEN_INT (-cfun->machine->frame_size);
      if (!CONST_OK_FOR_LETTER_P (-cfun->machine->frame_size, 'K'))
	frame_off = force_const_mem (Pmode, frame_off);

      insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
      RTX_FRAME_RELATED_P (insn) = 1;
      /* Record the stack adjustment explicitly for DWARF, since the
	 actual insn may use a pool reference instead of a constant.  */
      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				   gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			           GEN_INT (-cfun->machine->frame_size))),
			   REG_NOTES (insn));

      /* Set backchain.  */

      if (TARGET_BACKCHAIN)
	{
	  addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  insn = emit_insn (gen_move_insn (addr, temp_reg));
	}

      /* If we support asynchronous exceptions (e.g. for Java),
	 we need to make sure the backchain pointer is set up
	 before any possibly trapping memory access.  */

      if (TARGET_BACKCHAIN && flag_non_call_exceptions)
	{
	  addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
	}
    }

  /* Save fprs 8 - 15 (64 bit ABI).  TEMP_REG still holds the incoming
     stack pointer; point it at the fpr save area below it.  */

  if (cfun->machine->save_fprs_p)
    {
      insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));

      for (i = 24; i < 32; i++)
	if (regs_ever_live[i] && !global_regs[i])
	  {
	    /* DWARF address of the slot, relative to the new sp.  */
	    rtx addr = plus_constant (stack_pointer_rtx,
				      cfun->machine->frame_size - 64 + (i-24)*8);

	    insn = save_fpr (temp_reg, (i-24)*8, i);
	    RTX_FRAME_RELATED_P (insn) = 1;
	    REG_NOTES (insn) =
	      gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		gen_rtx_SET (VOIDmode,
			     gen_rtx_MEM (DFmode, addr),
			     gen_rtx_REG (DFmode, i)),
		REG_NOTES (insn));
	  }
    }

  /* Set frame pointer, if needed.  */

  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    {
      rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAG (got_symbol) = 1;

      if (TARGET_64BIT)
	{
	  insn = emit_insn (gen_movdi (pic_offset_table_rtx,
				       got_symbol));

          /* It can happen that the GOT pointer isn't really needed ...  */
          REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
                                               REG_NOTES (insn));
	}
      else
	{
	  /* 31 bit: load the GOT offset from the literal pool
	     (unspec 100), then add the base register (unspec 101).  */
          got_symbol = gen_rtx_UNSPEC (VOIDmode,
				       gen_rtvec (1, got_symbol), 100);
          got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
	  got_symbol = force_const_mem (Pmode, got_symbol);
	  insn = emit_move_insn (pic_offset_table_rtx,
				 got_symbol);
          REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
                                               REG_NOTES (insn));

          got_symbol = gen_rtx_REG (Pmode, BASE_REGISTER);
          got_symbol = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol), 101);
          got_symbol = gen_rtx_PLUS (Pmode, got_symbol, pic_offset_table_rtx);
	  insn = emit_move_insn (pic_offset_table_rtx, got_symbol);
          REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
                                               REG_NOTES (insn));
	}
    }
}
5420 
/* Expand the epilogue into a bunch of separate insns: make the register
   save area addressable, restore fprs and gprs, and emit the return.  */

void
s390_emit_epilogue ()
{
  rtx frame_pointer, return_reg;
  int area_bottom, area_top, offset = 0;
  rtvec p;

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = frame_pointer_needed ?
    hard_frame_pointer_rtx : stack_pointer_rtx;

  /* Compute which parts of the save area we need to access:
     AREA_BOTTOM / AREA_TOP delimit the byte range holding all
     registers to be restored.  */

  if (cfun->machine->first_restore_gpr != -1)
    {
      area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
      area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
    }
  else
    {
      /* No gprs to restore: start with an empty range.  */
      area_bottom = INT_MAX;
      area_top = INT_MIN;
    }

  /* Widen the range to cover any fprs that need restoring.  */

  if (TARGET_64BIT)
    {
      if (cfun->machine->save_fprs_p)
	{
	  if (area_bottom > -64)
	    area_bottom = -64;
	  if (area_top < 0)
	    area_top = 0;
	}
    }
  else
    {
      if (regs_ever_live[18] && !global_regs[18])
	{
	  if (area_bottom > STACK_POINTER_OFFSET - 16)
	    area_bottom = STACK_POINTER_OFFSET - 16;
	  if (area_top < STACK_POINTER_OFFSET - 8)
	    area_top = STACK_POINTER_OFFSET - 8;
	}
      if (regs_ever_live[19] && !global_regs[19])
	{
	  if (area_bottom > STACK_POINTER_OFFSET - 8)
	    area_bottom = STACK_POINTER_OFFSET - 8;
	  if (area_top < STACK_POINTER_OFFSET)
	    area_top = STACK_POINTER_OFFSET;
	}
    }

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
  else if (cfun->machine->frame_size + area_bottom >= 0
           && cfun->machine->frame_size + area_top <= 4096)
    {
      /* Area is in range.  */
      offset = cfun->machine->frame_size;
    }
  else
    {
      rtx insn, frame_off;

      /* Advance the frame pointer so the whole area is addressable;
	 large adjustments come from the literal pool.  */
      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (cfun->machine->frame_size - offset);

      if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
	frame_off = force_const_mem (Pmode, frame_off);

      insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
    }

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
    {
      int i;

      if (cfun->machine->save_fprs_p)
	for (i = 24; i < 32; i++)
	  if (regs_ever_live[i] && !global_regs[i])
	    restore_fpr (frame_pointer,
			 offset - 64 + (i-24) * 8, i);
    }
  else
    {
      if (regs_ever_live[18] && !global_regs[18])
	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
      if (regs_ever_live[19] && !global_regs[19])
	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
    }

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (cfun->machine->first_restore_gpr != -1)
    {
      rtx insn, addr;
      int i;

      /* Check for global register and save them
	 to stack location from where they get restored.  */

      for (i = cfun->machine->first_restore_gpr;
	   i <= cfun->machine->last_save_gpr;
	   i++)
	{
	  /* These registers are special and need to be
	     restored in any case.  */
	  if (i == STACK_POINTER_REGNUM
              || i == RETURN_REGNUM
              || i == BASE_REGISTER
              || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
	    continue;

	  /* Store the current value of a global register into its
	     save slot so the load-multiple below restores it.  */
	  if (global_regs[i])
	    {
	      addr = plus_constant (frame_pointer,
		     offset + i * UNITS_PER_WORD);
	      addr = gen_rtx_MEM (Pmode, addr);
	      set_mem_alias_set (addr, s390_sr_alias_set);
	      emit_move_insn (addr, gen_rtx_REG (Pmode, i));
	    }
	}

      /* Fetch return address from stack before load multiple,
	 this will do good for scheduling.  */

      if (!current_function_is_leaf)
	{
	  /* Pick an unused call-clobbered register; fall back to 4.  */
	  int return_regnum = find_unused_clobbered_reg();
	  if (!return_regnum)
	    return_regnum = 4;
	  return_reg = gen_rtx_REG (Pmode, return_regnum);

	  addr = plus_constant (frame_pointer,
				offset + RETURN_REGNUM * UNITS_PER_WORD);
	  addr = gen_rtx_MEM (Pmode, addr);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  emit_move_insn (return_reg, addr);
	}

      /* ??? As references to the base register are not made
	 explicit in insn RTX code, we have to add a barrier here
	 to prevent incorrect scheduling.  */

      emit_insn (gen_blockage());

      insn = restore_gprs (frame_pointer, offset,
			   cfun->machine->first_restore_gpr,
			   cfun->machine->last_save_gpr);
      emit_insn (insn);
    }

  /* Return to caller.  The USE keeps the (possibly alternate)
     return register live up to the jump.  */

  p = rtvec_alloc (2);

  RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
  RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
}
5595 
5596 
5597 /* Return the size in bytes of a function argument of
5598    type TYPE and/or mode MODE.  At least one of TYPE or
5599    MODE must be specified.  */
5600 
5601 static int
s390_function_arg_size(mode,type)5602 s390_function_arg_size (mode, type)
5603      enum machine_mode mode;
5604      tree type;
5605 {
5606   if (type)
5607     return int_size_in_bytes (type);
5608 
5609   /* No type info available for some library calls ...  */
5610   if (mode != BLKmode)
5611     return GET_MODE_SIZE (mode);
5612 
5613   /* If we have neither type nor mode, abort */
5614   abort ();
5615 }
5616 
5617 /* Return 1 if a function argument of type TYPE and mode MODE
5618    is to be passed by reference.  The ABI specifies that only
5619    structures of size 1, 2, 4, or 8 bytes are passed by value,
5620    all other structures (and complex numbers) are passed by
5621    reference.  */
5622 
5623 int
s390_function_arg_pass_by_reference(mode,type)5624 s390_function_arg_pass_by_reference (mode, type)
5625      enum machine_mode mode;
5626      tree type;
5627 {
5628   int size = s390_function_arg_size (mode, type);
5629   if (size > 8)
5630     return 1;
5631 
5632   if (type)
5633     {
5634       if (AGGREGATE_TYPE_P (type) &&
5635           size != 1 && size != 2 && size != 4 && size != 8)
5636         return 1;
5637 
5638       if (TREE_CODE (type) == COMPLEX_TYPE
5639           || TREE_CODE (type) == VECTOR_TYPE)
5640         return 1;
5641     }
5642   return 0;
5643 
5644 }
5645 
5646 /* Update the data in CUM to advance over an argument of mode MODE and
5647    data type TYPE.  (TYPE is null for libcalls where that information
5648    may not be available.).  The boolean NAMED specifies whether the
5649    argument is a named argument (as opposed to an unnamed argument
5650    matching an ellipsis).  */
5651 
5652 void
s390_function_arg_advance(cum,mode,type,named)5653 s390_function_arg_advance (cum, mode, type, named)
5654      CUMULATIVE_ARGS *cum;
5655      enum machine_mode mode;
5656      tree type;
5657      int named ATTRIBUTE_UNUSED;
5658 {
5659   if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5660     {
5661       cum->fprs++;
5662     }
5663   else if (s390_function_arg_pass_by_reference (mode, type))
5664     {
5665       cum->gprs += 1;
5666     }
5667   else
5668     {
5669       int size = s390_function_arg_size (mode, type);
5670       cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
5671     }
5672 }
5673 
5674 /* Define where to put the arguments to a function.
5675    Value is zero to push the argument on the stack,
5676    or a hard register in which to store the argument.
5677 
5678    MODE is the argument's machine mode.
5679    TYPE is the data type of the argument (as a tree).
5680     This is null for libcalls where that information may
5681     not be available.
5682    CUM is a variable of type CUMULATIVE_ARGS which gives info about
5683     the preceding args and about the function being called.
5684    NAMED is nonzero if this argument is a named parameter
5685     (otherwise it is an extra parameter matching an ellipsis).
5686 
5687    On S/390, we use general purpose registers 2 through 6 to
5688    pass integer, pointer, and certain structure arguments, and
5689    floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5690    to pass floating point arguments.  All remaining arguments
5691    are pushed to the stack.  */
5692 
5693 rtx
s390_function_arg(cum,mode,type,named)5694 s390_function_arg (cum, mode, type, named)
5695      CUMULATIVE_ARGS *cum;
5696      enum machine_mode mode;
5697      tree type;
5698      int named ATTRIBUTE_UNUSED;
5699 {
5700   if (s390_function_arg_pass_by_reference (mode, type))
5701       return 0;
5702 
5703   if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5704     {
5705       if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
5706 	return 0;
5707       else
5708 	return gen_rtx (REG, mode, cum->fprs + 16);
5709     }
5710   else
5711     {
5712       int size = s390_function_arg_size (mode, type);
5713       int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
5714 
5715       if (cum->gprs + n_gprs > 5)
5716 	return 0;
5717       else
5718 	return gen_rtx (REG, mode, cum->gprs + 2);
5719     }
5720 }
5721 
5722 
/* Create and return the va_list datatype.

   On S/390, va_list is an array type equivalent to

      typedef struct __va_list_tag
        {
            long __gpr;
            long __fpr;
            void *__overflow_arg_area;
            void *__reg_save_area;

        } va_list[1];

   where __gpr and __fpr hold the number of general purpose
   or floating point arguments used up to now, respectively,
   __overflow_arg_area points to the stack location of the
   next argument passed on the stack, and __reg_save_area
   always points to the start of the register area in the
   call frame of the current function.  The function prologue
   saves all registers used for argument passing into this
   area if the function uses variable arguments.  */

tree
s390_build_va_list ()
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);

  type_decl =
    build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* Build the four fields in the layout documented above.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
		      long_integer_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
		      long_integer_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields in declaration order; s390_va_start and
     s390_va_arg walk TYPE_FIELDS/TREE_CHAIN in this order.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
5781 
/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is always true, and ignored.
   NEXTARG points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     current_function_args_info:
       holds number of gprs and fprs used for named arguments.
     current_function_arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

void
s390_va_start (valist, nextarg)
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Locate the four fields of the va_list record, in the order
     they were chained by s390_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* VALIST is an array type; deref it to reach the record, then
     build component references for each field.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = current_function_args_info.gprs;
  n_fpr = current_function_args_info.fprs;

  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

  /* Negative offsets are clamped to zero.  */
  off = INTVAL (current_function_arg_offset_rtx);
  off = off < 0 ? 0 : off;
  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
	     (int)n_gpr, (int)n_fpr, off);

  t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));

  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-STACK_POINTER_OFFSET, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
5852 
/* Implement va_arg by updating the va_list structure
   VALIST as required to retrieve an argument of type
   TYPE, and returning that argument.

   Generates code equivalent to:

   if (integral value) {
     if (size  <= 4 && args.gpr < 5 ||
         size  > 4 && args.gpr < 4 )
       ret = args.reg_save_area[args.gpr+8]
     else
       ret = *args.overflow_arg_area++;
   } else if (float value) {
     if (args.fpr < 2)
       ret = args.reg_save_area[args.fpr+64]
     else
       ret = *args.overflow_arg_area++;
   } else if (aggregate value) {
     if (args.gpr < 5)
       ret = *args.reg_save_area[args.gpr]
     else
       ret = **args.overflow_arg_area++;
   } */

rtx
s390_va_arg (valist, type)
     tree valist;
     tree type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  rtx lab_false, lab_over, addr_rtx, r;

  /* The four fields of the va_list record, in declaration order:
     __gpr, __fpr, __overflow_arg_area, __reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);

  /* Classify the argument and set up the parameters that drive the
     common code below:
       indirect_p - argument is passed by reference
       reg        - register counter field to test/increment (gpr or fpr)
       n_reg      - number of registers the argument consumes
       sav_ofs    - byte offset of the first save slot in reg_save_area
       sav_scale  - bytes per register slot in the save area
       max_reg    - highest register count for which the argument is
                    still fetched from the save area.  */
  if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: aggregate type");
	  debug_tree (type);
	}

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 2 * UNITS_PER_WORD;
      sav_scale = UNITS_PER_WORD;
      /* The pointer itself occupies one word.  */
      size = UNITS_PER_WORD;
      max_reg = 4;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: float type");
	  debug_tree (type);
	}

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_WORD;
      sav_scale = 8;
      /* TARGET_64BIT has up to 4 parameter in fprs */
      max_reg = TARGET_64BIT ? 3 : 1;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: other type");
	  debug_tree (type);
	}

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      sav_ofs = 2 * UNITS_PER_WORD;
      /* Small scalars are right-justified within their register slot;
	 adjust the offset to point at the value itself.  */
      if (TARGET_64BIT)
	sav_ofs += TYPE_MODE (type) == SImode ? 4 :
	           TYPE_MODE (type) == HImode ? 6 :
	           TYPE_MODE (type) == QImode ? 7 : 0;
      else
	sav_ofs += TYPE_MODE (type) == HImode ? 2 :
	           TYPE_MODE (type) == QImode ? 3 : 0;

      sav_scale = UNITS_PER_WORD;
      /* A multi-register argument must fit entirely in registers.  */
      if (n_reg > 1)
	max_reg = 3;
      else
	max_reg = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* If the register count already exceeds max_reg, the argument lives
     in the overflow area instead.  */
  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
			   GEN_INT (max_reg),
			   GT, const1_rtx, Pmode, 0, lab_false);

  if (sav_ofs)
    t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
  else
    t = sav;

  /* addr = reg_save_area + sav_ofs + reg * sav_scale.  */
  u = build (MULT_EXPR, long_integer_type_node,
	     reg, build_int_2 (sav_scale, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  t = build (PLUS_EXPR, ptr_type_node, t, u);
  TREE_SIDE_EFFECTS (t) = 1;

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);


  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);

  /* ... Otherwise out of the overflow area.  */

  t = save_expr (ovf);


  /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated.
     Right-justify a small argument within its slot by bumping the
     overflow pointer first.  */
  if (size < UNITS_PER_WORD)
    {
      t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
      t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = save_expr (ovf);
    }

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance overflow_arg_area past the consumed argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* If less than max_reg registers are retrieved out
     of register save area, increment.  */

  u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
	     build_int_2 (n_reg, 0));
  TREE_SIDE_EFFECTS (u) = 1;
  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Pass-by-reference arguments: addr_rtx currently holds the address
     of the pointer; load through it to get the argument's address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }


  return addr_rtx;
}
6038 
6039 
6040 /* Builtins.  */
6041 
/* Codes for the machine-specific builtins supported on S/390.
   Used to index code_for_builtin_64 / code_for_builtin_31.  */
enum s390_builtin
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};

/* Insn codes implementing each builtin in 64-bit (zSeries) mode.  */
static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

/* Insn codes implementing each builtin in 31-bit (ESA) mode.  */
static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};
6059 
6060 static void
s390_init_builtins()6061 s390_init_builtins ()
6062 {
6063   tree ftype;
6064 
6065   ftype = build_function_type (ptr_type_node, void_list_node);
6066   builtin_function ("__builtin_thread_pointer", ftype,
6067 		    S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6068 		    NULL, NULL_TREE);
6069 
6070   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6071   builtin_function ("__builtin_set_thread_pointer", ftype,
6072 		    S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6073 		    NULL, NULL_TREE);
6074 }
6075 
6076 /* Expand an expression EXP that calls a built-in function,
6077    with result going to TARGET if that's convenient
6078    (and in mode MODE if that's convenient).
6079    SUBTARGET may be used as the target for computing one of EXP's operands.
6080    IGNORE is nonzero if the value is to be ignored.  */
6081 
6082 static rtx
s390_expand_builtin(exp,target,subtarget,mode,ignore)6083 s390_expand_builtin (exp, target, subtarget, mode, ignore)
6084      tree exp;
6085      rtx target;
6086      rtx subtarget ATTRIBUTE_UNUSED;
6087      enum machine_mode mode ATTRIBUTE_UNUSED;
6088      int ignore ATTRIBUTE_UNUSED;
6089 {
6090 #define MAX_ARGS 2
6091 
6092   unsigned int const *code_for_builtin =
6093     TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6094 
6095   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6096   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6097   tree arglist = TREE_OPERAND (exp, 1);
6098   enum insn_code icode;
6099   rtx op[MAX_ARGS], pat;
6100   int arity;
6101   bool nonvoid;
6102 
6103   if (fcode >= S390_BUILTIN_max)
6104     internal_error ("bad builtin fcode");
6105   icode = code_for_builtin[fcode];
6106   if (icode == 0)
6107     internal_error ("bad builtin fcode");
6108 
6109   nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6110 
6111   for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6112        arglist;
6113        arglist = TREE_CHAIN (arglist), arity++)
6114     {
6115       const struct insn_operand_data *insn_op;
6116 
6117       tree arg = TREE_VALUE (arglist);
6118       if (arg == error_mark_node)
6119 	return NULL_RTX;
6120       if (arity > MAX_ARGS)
6121 	return NULL_RTX;
6122 
6123       insn_op = &insn_data[icode].operand[arity + nonvoid];
6124 
6125       op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6126 
6127       if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6128 	op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6129     }
6130 
6131   if (nonvoid)
6132     {
6133       enum machine_mode tmode = insn_data[icode].operand[0].mode;
6134       if (!target
6135 	  || GET_MODE (target) != tmode
6136 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6137 	target = gen_reg_rtx (tmode);
6138     }
6139 
6140   switch (arity)
6141     {
6142     case 0:
6143       pat = GEN_FCN (icode) (target);
6144       break;
6145     case 1:
6146       if (nonvoid)
6147         pat = GEN_FCN (icode) (target, op[0]);
6148       else
6149 	pat = GEN_FCN (icode) (op[0]);
6150       break;
6151     case 2:
6152       pat = GEN_FCN (icode) (target, op[0], op[1]);
6153       break;
6154     default:
6155       abort ();
6156     }
6157   if (!pat)
6158     return NULL_RTX;
6159   emit_insn (pat);
6160 
6161   if (nonvoid)
6162     return target;
6163   else
6164     return const0_rtx;
6165 }
6166 
6167 
6168 /* Output assembly code for the trampoline template to
6169    stdio stream FILE.
6170 
6171    On S/390, we use gpr 1 internally in the trampoline code;
6172    gpr 0 is used to hold the static chain.  */
6173 
6174 void
s390_trampoline_template(file)6175 s390_trampoline_template (file)
6176      FILE *file;
6177 {
6178   if (TARGET_64BIT)
6179     {
6180       fprintf (file, "larl\t%s,0f\n", reg_names[1]);
6181       fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
6182       fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
6183       fprintf (file, "br\t%s\n", reg_names[1]);
6184       fprintf (file, "0:\t.quad\t0\n");
6185       fprintf (file, ".quad\t0\n");
6186     }
6187   else
6188     {
6189       fprintf (file, "basr\t%s,0\n", reg_names[1]);
6190       fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
6191       fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
6192       fprintf (file, "br\t%s\n", reg_names[1]);
6193       fprintf (file, ".long\t0\n");
6194       fprintf (file, ".long\t0\n");
6195     }
6196 }
6197 
6198 /* Emit RTL insns to initialize the variable parts of a trampoline.
6199    FNADDR is an RTX for the address of the function's pure code.
6200    CXT is an RTX for the static chain value for the function.  */
6201 
6202 void
s390_initialize_trampoline(addr,fnaddr,cxt)6203 s390_initialize_trampoline (addr, fnaddr, cxt)
6204      rtx addr;
6205      rtx fnaddr;
6206      rtx cxt;
6207 {
6208   emit_move_insn (gen_rtx
6209 		  (MEM, Pmode,
6210 		   memory_address (Pmode,
6211 		   plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
6212   emit_move_insn (gen_rtx
6213 		  (MEM, Pmode,
6214 		   memory_address (Pmode,
6215 		   plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
6216 }
6217 
6218 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6219    LOW and HIGH, independent of the host word size.  */
6220 
6221 rtx
s390_gen_rtx_const_DI(high,low)6222 s390_gen_rtx_const_DI (high, low)
6223      int high;
6224      int low;
6225 {
6226 #if HOST_BITS_PER_WIDE_INT >= 64
6227   HOST_WIDE_INT val;
6228   val = (HOST_WIDE_INT)high;
6229   val <<= 32;
6230   val |= (HOST_WIDE_INT)low;
6231 
6232   return GEN_INT (val);
6233 #else
6234 #if HOST_BITS_PER_WIDE_INT >= 32
6235   return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
6236 #else
6237   abort ();
6238 #endif
6239 #endif
6240 }
6241 
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (file, labelno)
     FILE *file;
     int labelno;
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  /* Operand table:
     op[0] - return address register (saved/restored around the call)
     op[1] - save slot for op[0] in the caller's stack frame
     op[2] - scratch register (gpr 1)
     op[3] - the per-call-site counter label (LP<labelno>)
     op[4] - the _mcount symbol (wrapped for PIC if needed)
     op[5] - local base label (31-bit PIC only)
     op[6] - local label past the inline literals (31-bit only).  */
  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAG (op[3]) = 1;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      /* Reference _mcount through the PLT (unspec 113).  */
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      /* 64-bit: PC-relative addressing makes this short.  */
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      /* 31-bit non-PIC: bras skips over two inline address literals
	 (_mcount and the counter label) while setting up gpr 1 as
	 their base.  */
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      /* 31-bit PIC: the literals hold base-relative offsets, which
	 are turned back into addresses by adding the base label %5.  */
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
6311 
6312 /* Select section for constant in constant pool.  In 32-bit mode,
6313    constants go in the function section; in 64-bit mode in .rodata.  */
6314 
6315 static void
s390_select_rtx_section(mode,x,align)6316 s390_select_rtx_section (mode, x, align)
6317      enum machine_mode mode ATTRIBUTE_UNUSED;
6318      rtx x ATTRIBUTE_UNUSED;
6319      unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
6320 {
6321   if (TARGET_64BIT)
6322     readonly_data_section ();
6323   else
6324     function_section (current_function_decl);
6325 }
6326 
/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its name and SYMBOL_REF_FLAG.  */

static void
s390_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  bool local_p = (*targetm.binds_local_p) (decl);
  rtx rtl, symbol;

  /* Only memory-resident decls with a SYMBOL_REF address are of
     interest here.  */
  rtl = DECL_P (decl) ? DECL_RTL (decl) : TREE_CST_RTL (decl);
  if (GET_CODE (rtl) != MEM)
    return;
  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  /* When using PIC, SYMBOL_REF_FLAG marks non-global symbols
     that can be accessed directly.  */
  if (flag_pic)
    SYMBOL_REF_FLAG (symbol) = local_p;

  /* Encode thread-local data with %[GLil] for "global dynamic",
     "local dynamic", "initial exec" or "local exec" TLS models,
     respectively.  */

  if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
    {
      const char *symbol_str = XSTR (symbol, 0);
      char *newstr;
      size_t len;
      enum tls_model kind = decl_tls_model (decl);

      if (!flag_pic)
	{
	  /* We don't allow non-pic code for shared libraries,
	     so don't generate GD/LD TLS models for non-pic code.  */
	  switch (kind)
	    {
	    case TLS_MODEL_GLOBAL_DYNAMIC:
	      kind = TLS_MODEL_INITIAL_EXEC; break;
	    case TLS_MODEL_LOCAL_DYNAMIC:
	      kind = TLS_MODEL_LOCAL_EXEC; break;
	    default:
	      break;
	    }
	}

      /* A previously encoded name starts with '%' followed by the
	 model character; if the model is unchanged there is nothing
	 to do, otherwise strip the old prefix and re-encode.  */
      if (symbol_str[0] == '%')
	{
	  if (symbol_str[1] == tls_model_chars[kind])
	    return;
	  symbol_str += 2;
	}
      /* LEN includes the terminating NUL; add 2 for the "%X" prefix.  */
      len = strlen (symbol_str) + 1;
      newstr = alloca (len + 2);

      newstr[0] = '%';
      newstr[1] = tls_model_chars[kind];
      memcpy (newstr + 2, symbol_str, len);

      /* ggc_alloc_string takes the length without the NUL.  */
      XSTR (symbol, 0) = ggc_alloc_string (newstr, len + 2 - 1);
    }

  /* If a variable has a forced alignment to < 2 bytes, mark it
     with '@' to prevent it from being used as LARL operand.  */

  else if (TREE_CODE (decl) == VAR_DECL
	   && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16
	   && XSTR (symbol, 0)[0] != '@')
    {
      const char *symbol_str = XSTR (symbol, 0);
      size_t len = strlen (symbol_str) + 1;
      char *newstr = alloca (len + 1);

      newstr[0] = '@';
      memcpy (newstr + 1, symbol_str, len);

      XSTR (symbol, 0) = ggc_alloc_string (newstr, len + 1 - 1);
    }
}
6409 
/* Undo the encodings applied by s390_encode_section_info when
   printing symbol names: skip a "%X" TLS-model prefix, an '@'
   low-alignment marker, and a '*' no-prefix marker, in that order.  */

static const char *
s390_strip_name_encoding (str)
     const char *str;
{
  const char *name = str;

  if (name[0] == '%')
    name += 2;
  if (name[0] == '@')
    name += 1;
  if (name[0] == '*')
    name += 1;

  return name;
}
6424 
6425 /* Output thunk to FILE that implements a C++ virtual function call (with
6426    multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
6427    by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6428    stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6429    relative to the resulting this pointer.  */
6430 
6431 static void
s390_output_mi_thunk(file,thunk,delta,vcall_offset,function)6432 s390_output_mi_thunk (file, thunk, delta, vcall_offset, function)
6433      FILE *file;
6434      tree thunk ATTRIBUTE_UNUSED;
6435      HOST_WIDE_INT delta;
6436      HOST_WIDE_INT vcall_offset;
6437      tree function;
6438 {
6439   rtx op[10];
6440   int nonlocal = 0;
6441 
6442   /* Operand 0 is the target function.  */
6443   op[0] = XEXP (DECL_RTL (function), 0);
6444   if (flag_pic && !SYMBOL_REF_FLAG (op[0]))
6445     {
6446       nonlocal = 1;
6447       op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
6448 			      TARGET_64BIT ? 113 : flag_pic == 2 ? 112 : 110);
6449       op[0] = gen_rtx_CONST (Pmode, op[0]);
6450     }
6451 
6452   /* Operand 1 is the 'this' pointer.  */
6453   if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
6454     op[1] = gen_rtx_REG (Pmode, 3);
6455   else
6456     op[1] = gen_rtx_REG (Pmode, 2);
6457 
6458   /* Operand 2 is the delta.  */
6459   op[2] = GEN_INT (delta);
6460 
6461   /* Operand 3 is the vcall_offset.  */
6462   op[3] = GEN_INT (vcall_offset);
6463 
6464   /* Operand 4 is the temporary register.  */
6465   op[4] = gen_rtx_REG (Pmode, 1);
6466 
6467   /* Operands 5 to 8 can be used as labels.  */
6468   op[5] = NULL_RTX;
6469   op[6] = NULL_RTX;
6470   op[7] = NULL_RTX;
6471   op[8] = NULL_RTX;
6472 
6473   /* Operand 9 can be used for temporary register.  */
6474   op[9] = NULL_RTX;
6475 
6476   /* Generate code.  */
6477   if (TARGET_64BIT)
6478     {
6479       /* Setup literal pool pointer if required.  */
6480       if (!CONST_OK_FOR_LETTER_P (delta, 'K')
6481 	  || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6482 	{
6483 	  op[5] = gen_label_rtx ();
6484 	  output_asm_insn ("larl\t%4,%5", op);
6485 	}
6486 
6487       /* Add DELTA to this pointer.  */
6488       if (delta)
6489 	{
6490 	  if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6491 	    output_asm_insn ("la\t%1,%2(%1)", op);
6492 	  else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6493 	    output_asm_insn ("aghi\t%1,%2", op);
6494 	  else
6495 	    {
6496 	      op[6] = gen_label_rtx ();
6497 	      output_asm_insn ("agf\t%1,%6-%5(%4)", op);
6498 	    }
6499 	}
6500 
6501       /* Perform vcall adjustment.  */
6502       if (vcall_offset)
6503 	{
6504 	  if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
6505 	    {
6506 	      output_asm_insn ("lg\t%4,0(%1)", op);
6507 	      output_asm_insn ("ag\t%1,%3(%4)", op);
6508 	    }
6509 	  else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6510 	    {
6511 	      output_asm_insn ("lghi\t%4,%3", op);
6512 	      output_asm_insn ("ag\t%4,0(%1)", op);
6513 	      output_asm_insn ("ag\t%1,0(%4)", op);
6514 	    }
6515 	  else
6516 	    {
6517 	      op[7] = gen_label_rtx ();
6518 	      output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
6519 	      output_asm_insn ("ag\t%4,0(%1)", op);
6520 	      output_asm_insn ("ag\t%1,0(%4)", op);
6521 	    }
6522 	}
6523 
6524       /* Jump to target.  */
6525       output_asm_insn ("jg\t%0", op);
6526 
6527       /* Output literal pool if required.  */
6528       if (op[5])
6529 	{
6530 	  output_asm_insn (".align\t4", op);
6531 	  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
6532 	}
6533       if (op[6])
6534 	{
6535 	  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
6536 	  output_asm_insn (".long\t%2", op);
6537 	}
6538       if (op[7])
6539 	{
6540 	  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[7]));
6541 	  output_asm_insn (".long\t%3", op);
6542 	}
6543     }
6544   else
6545     {
6546       /* Setup base pointer if required.  */
6547       if (!vcall_offset
6548 	  || !CONST_OK_FOR_LETTER_P (delta, 'K')
6549 	  || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6550 	{
6551 	  op[5] = gen_label_rtx ();
6552 	  output_asm_insn ("basr\t%4,0", op);
6553 	  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
6554 	}
6555 
6556       /* Add DELTA to this pointer.  */
6557       if (delta)
6558 	{
6559 	  if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6560 	    output_asm_insn ("la\t%1,%2(%1)", op);
6561 	  else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6562 	    output_asm_insn ("ahi\t%1,%2", op);
6563 	  else
6564 	    {
6565 	      op[6] = gen_label_rtx ();
6566 	      output_asm_insn ("a\t%1,%6-%5(%4)", op);
6567 	    }
6568 	}
6569 
6570       /* Perform vcall adjustment.  */
6571       if (vcall_offset)
6572         {
6573 	  if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
6574 	    {
6575 	      output_asm_insn ("lg\t%4,0(%1)", op);
6576 	      output_asm_insn ("a\t%1,%3(%4)", op);
6577 	    }
6578 	  else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6579 	    {
6580 	      output_asm_insn ("lhi\t%4,%3", op);
6581 	      output_asm_insn ("a\t%4,0(%1)", op);
6582 	      output_asm_insn ("a\t%1,0(%4)", op);
6583 	    }
6584 	  else
6585 	    {
6586 	      op[7] = gen_label_rtx ();
6587 	      output_asm_insn ("l\t%4,%7-%5(%4)", op);
6588 	      output_asm_insn ("a\t%4,0(%1)", op);
6589 	      output_asm_insn ("a\t%1,0(%4)", op);
6590 	    }
6591 
6592 	  /* We had to clobber the base pointer register.
6593 	     Re-setup the base pointer (with a different base).  */
6594 	  op[5] = gen_label_rtx ();
6595 	  output_asm_insn ("basr\t%4,0", op);
6596 	  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
6597 	}
6598 
6599       /* Jump to target.  */
6600       op[8] = gen_label_rtx ();
6601 
6602       if (!flag_pic)
6603 	output_asm_insn ("l\t%4,%8-%5(%4)", op);
6604       else if (!nonlocal)
6605 	output_asm_insn ("a\t%4,%8-%5(%4)", op);
6606       /* We cannot call through .plt, since .plt requires %r12 loaded.  */
6607       else if (flag_pic == 1)
6608 	{
6609 	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
6610 	  output_asm_insn ("l\t%4,%0(%4)", op);
6611 	}
6612       else if (flag_pic == 2)
6613 	{
6614 	  op[9] = gen_rtx_REG (Pmode, 0);
6615 	  output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
6616 	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
6617 	  output_asm_insn ("ar\t%4,%9", op);
6618 	  output_asm_insn ("l\t%4,0(%4)", op);
6619 	}
6620 
6621       output_asm_insn ("br\t%4", op);
6622 
6623       /* Output literal pool.  */
6624       output_asm_insn (".align\t4", op);
6625 
6626       if (nonlocal && flag_pic == 2)
6627 	output_asm_insn (".long\t%0", op);
6628       if (nonlocal)
6629 	{
6630 	  op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6631 	  SYMBOL_REF_FLAG (op[0]) = 1;
6632 	}
6633 
6634       ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[8]));
6635       if (!flag_pic)
6636 	output_asm_insn (".long\t%0", op);
6637       else
6638 	output_asm_insn (".long\t%0-%5", op);
6639 
6640       if (op[6])
6641 	{
6642 	  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
6643 	  output_asm_insn (".long\t%2", op);
6644 	}
6645       if (op[7])
6646 	{
6647 	  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[7]));
6648 	  output_asm_insn (".long\t%3", op);
6649 	}
6650     }
6651 }
6652 
6653 /* How to allocate a 'struct machine_function'.  */
6654 
6655 static struct machine_function *
s390_init_machine_status()6656 s390_init_machine_status ()
6657 {
6658   return ggc_alloc_cleared (sizeof (struct machine_function));
6659 }
6660 
6661 #include "gt-s390.h"
6662