1 /* Xstormy16 target functions. 2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002 3 Free Software Foundation, Inc. 4 Contributed by Red Hat, Inc. 5 6 This file is part of GNU CC. 7 8 GNU CC is free software; you can redistribute it and/or modify 9 it under the terms of the GNU General Public License as published by 10 the Free Software Foundation; either version 2, or (at your option) 11 any later version. 12 13 GNU CC is distributed in the hope that it will be useful, 14 but WITHOUT ANY WARRANTY; without even the implied warranty of 15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16 GNU General Public License for more details. 17 18 You should have received a copy of the GNU General Public License 19 along with GNU CC; see the file COPYING. If not, write to 20 the Free Software Foundation, 59 Temple Place - Suite 330, 21 Boston, MA 02111-1307, USA. */ 22 23 #include "config.h" 24 #include "system.h" 25 #include "rtl.h" 26 #include "regs.h" 27 #include "hard-reg-set.h" 28 #include "real.h" 29 #include "insn-config.h" 30 #include "conditions.h" 31 #include "insn-flags.h" 32 #include "output.h" 33 #include "insn-attr.h" 34 #include "flags.h" 35 #include "recog.h" 36 #include "toplev.h" 37 #include "obstack.h" 38 #include "tree.h" 39 #include "expr.h" 40 #include "optabs.h" 41 #include "output.h" 42 #include "except.h" 43 #include "function.h" 44 #include "target.h" 45 #include "target-def.h" 46 #include "tm_p.h" 47 #include "langhooks.h" 48 49 static rtx emit_addhi3_postreload PARAMS ((rtx, rtx, rtx)); 50 static void xstormy16_asm_out_constructor PARAMS ((rtx, int)); 51 static void xstormy16_asm_out_destructor PARAMS ((rtx, int)); 52 static void xstormy16_encode_section_info PARAMS ((tree, int)); 53 static void xstormy16_asm_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT, 54 HOST_WIDE_INT, tree)); 55 56 static void xstormy16_init_builtins PARAMS ((void)); 57 static rtx xstormy16_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int)); 58 59 /* Define the information needed to generate branch and scc insns. This is 60 stored from the compare operation. */ 61 struct rtx_def * xstormy16_compare_op0; 62 struct rtx_def * xstormy16_compare_op1; 63 64 /* Return 1 if this is a LT, GE, LTU, or GEU operator. */ 65 66 int 67 xstormy16_ineqsi_operator (op, mode) 68 register rtx op; 69 enum machine_mode mode; 70 { 71 enum rtx_code code = GET_CODE (op); 72 73 return ((mode == VOIDmode || GET_MODE (op) == mode) 74 && (code == LT || code == GE || code == LTU || code == GEU)); 75 } 76 77 /* Return 1 if this is an EQ or NE operator. */ 78 79 int 80 equality_operator (op, mode) 81 register rtx op; 82 enum machine_mode mode; 83 { 84 return ((mode == VOIDmode || GET_MODE (op) == mode) 85 && (GET_CODE (op) == EQ || GET_CODE (op) == NE)); 86 } 87 88 /* Return 1 if this is a comparison operator but not an EQ or NE operator. */ 89 90 int 91 inequality_operator (op, mode) 92 register rtx op; 93 enum machine_mode mode; 94 { 95 return comparison_operator (op, mode) && ! equality_operator (op, mode); 96 } 97 98 /* Branches are handled as follows: 99 100 1. HImode compare-and-branches. The machine supports these 101 natively, so the appropriate pattern is emitted directly. 102 103 2. SImode EQ and NE. These are emitted as pairs of HImode 104 compare-and-branches. 105 106 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence 107 of a SImode subtract followed by a branch (not a compare-and-branch), 108 like this: 109 sub 110 sbc 111 blt 112 113 4. SImode GT, LE, GTU, LEU. 
These are emitted as a sequence like: 114 sub 115 sbc 116 blt 117 or 118 bne 119 */ 120 121 /* Emit a branch of kind CODE to location LOC. */ 122 123 void 124 xstormy16_emit_cbranch (code, loc) 125 enum rtx_code code; 126 rtx loc; 127 { 128 rtx op0 = xstormy16_compare_op0; 129 rtx op1 = xstormy16_compare_op1; 130 rtx condition_rtx, loc_ref, branch, cy_clobber; 131 rtvec vec; 132 enum machine_mode mode; 133 134 mode = GET_MODE (op0); 135 if (mode != HImode && mode != SImode) 136 abort (); 137 138 if (mode == SImode 139 && (code == GT || code == LE || code == GTU || code == LEU)) 140 { 141 int unsigned_p = (code == GTU || code == LEU); 142 int gt_p = (code == GT || code == GTU); 143 rtx lab = NULL_RTX; 144 145 if (gt_p) 146 lab = gen_label_rtx (); 147 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, gt_p ? lab : loc); 148 /* This should be generated as a comparison against the temporary 149 created by the previous insn, but reload can't handle that. */ 150 xstormy16_emit_cbranch (gt_p ? NE : EQ, loc); 151 if (gt_p) 152 emit_label (lab); 153 return; 154 } 155 else if (mode == SImode 156 && (code == NE || code == EQ) 157 && op1 != const0_rtx) 158 { 159 rtx lab = NULL_RTX; 160 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD; 161 int i; 162 163 if (code == EQ) 164 lab = gen_label_rtx (); 165 166 for (i = 0; i < num_words - 1; i++) 167 { 168 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode, 169 i * UNITS_PER_WORD); 170 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode, 171 i * UNITS_PER_WORD); 172 xstormy16_emit_cbranch (NE, code == EQ ? lab : loc); 173 } 174 xstormy16_compare_op0 = simplify_gen_subreg (word_mode, op0, mode, 175 i * UNITS_PER_WORD); 176 xstormy16_compare_op1 = simplify_gen_subreg (word_mode, op1, mode, 177 i * UNITS_PER_WORD); 178 xstormy16_emit_cbranch (code, loc); 179 180 if (code == EQ) 181 emit_label (lab); 182 return; 183 } 184 185 /* We can't allow reload to try to generate any reload after a branch, 186 so when some register must match we must make the temporary ourselves. */ 187 if (mode != HImode) 188 { 189 rtx tmp; 190 tmp = gen_reg_rtx (mode); 191 emit_move_insn (tmp, op0); 192 op0 = tmp; 193 } 194 195 condition_rtx = gen_rtx (code, mode, op0, op1); 196 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc); 197 branch = gen_rtx_SET (VOIDmode, pc_rtx, 198 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx, 199 loc_ref, pc_rtx)); 200 201 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (BImode)); 202 203 if (mode == HImode) 204 vec = gen_rtvec (2, branch, cy_clobber); 205 else if (code == NE || code == EQ) 206 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0)); 207 else 208 { 209 rtx sub; 210 #if 0 211 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1)); 212 #else 213 sub = gen_rtx_CLOBBER (SImode, op0); 214 #endif 215 vec = gen_rtvec (3, branch, sub, cy_clobber); 216 } 217 218 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec)); 219 } 220 221 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split 222 the arithmetic operation. Most of the work is done by 223 xstormy16_expand_arith. 
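   Roughly speaking, the splitter re-expands the comparison as the
   explicit word-by-word subtraction described above (a `sub' followed
   by `sbc'), ending in a branch template, and then patches that final
   insn with the requested condition and label.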
*/ 224 225 void 226 xstormy16_split_cbranch (mode, label, comparison, dest, carry) 227 enum machine_mode mode; 228 rtx label; 229 rtx comparison; 230 rtx dest; 231 rtx carry; 232 { 233 rtx op0 = XEXP (comparison, 0); 234 rtx op1 = XEXP (comparison, 1); 235 rtx seq, last_insn; 236 rtx compare; 237 238 start_sequence (); 239 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1, carry); 240 seq = get_insns (); 241 end_sequence (); 242 243 if (! INSN_P (seq)) 244 abort (); 245 246 last_insn = seq; 247 while (NEXT_INSN (last_insn) != NULL_RTX) 248 last_insn = NEXT_INSN (last_insn); 249 250 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0)); 251 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison)); 252 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label); 253 emit_insn (seq); 254 } 255 256 257 /* Return the string to output a conditional branch to LABEL, which is 258 the operand number of the label. 259 260 OP is the conditional expression, or NULL for branch-always. 261 262 REVERSED is nonzero if we should reverse the sense of the comparison. 263 264 INSN is the insn. */ 265 266 char * 267 xstormy16_output_cbranch_hi (op, label, reversed, insn) 268 rtx op; 269 const char * label; 270 int reversed; 271 rtx insn; 272 { 273 static char string[64]; 274 int need_longbranch = (op != NULL_RTX 275 ? get_attr_length (insn) == 8 276 : get_attr_length (insn) == 4); 277 int really_reversed = reversed ^ need_longbranch; 278 const char *ccode; 279 const char *template; 280 const char *operands; 281 enum rtx_code code; 282 283 if (! op) 284 { 285 if (need_longbranch) 286 ccode = "jmpf"; 287 else 288 ccode = "br"; 289 sprintf (string, "%s %s", ccode, label); 290 return string; 291 } 292 293 code = GET_CODE (op); 294 295 if (GET_CODE (XEXP (op, 0)) != REG) 296 { 297 code = swap_condition (code); 298 operands = "%3,%2"; 299 } 300 else 301 operands = "%2,%3"; 302 303 /* Work out which way this really branches. */ 304 if (really_reversed) 305 code = reverse_condition (code); 306 307 switch (code) 308 { 309 case EQ: ccode = "z"; break; 310 case NE: ccode = "nz"; break; 311 case GE: ccode = "ge"; break; 312 case LT: ccode = "lt"; break; 313 case GT: ccode = "gt"; break; 314 case LE: ccode = "le"; break; 315 case GEU: ccode = "nc"; break; 316 case LTU: ccode = "c"; break; 317 case GTU: ccode = "hi"; break; 318 case LEU: ccode = "ls"; break; 319 320 default: 321 abort (); 322 } 323 324 if (need_longbranch) 325 template = "b%s %s,.+8 | jmpf %s"; 326 else 327 template = "b%s %s,%s"; 328 sprintf (string, template, ccode, operands, label); 329 330 return string; 331 } 332 333 /* Return the string to output a conditional branch to LABEL, which is 334 the operand number of the label, but suitable for the tail of a 335 SImode branch. 336 337 OP is the conditional expression (OP is never NULL_RTX). 338 339 REVERSED is nonzero if we should reverse the sense of the comparison. 340 341 INSN is the insn. */ 342 343 char * 344 xstormy16_output_cbranch_si (op, label, reversed, insn) 345 rtx op; 346 const char * label; 347 int reversed; 348 rtx insn; 349 { 350 static char string[64]; 351 int need_longbranch = get_attr_length (insn) >= 8; 352 int really_reversed = reversed ^ need_longbranch; 353 const char *ccode; 354 const char *template; 355 char prevop[16]; 356 enum rtx_code code; 357 358 code = GET_CODE (op); 359 360 /* Work out which way this really branches. 
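   When a long branch is needed, the sense of the test is inverted
   (REVERSED ^ NEED_LONGBRANCH above) so that a short conditional
   branch can skip over the unconditional `jmpf' that actually reaches
   the distant label.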
*/ 361 if (really_reversed) 362 code = reverse_condition (code); 363 364 switch (code) 365 { 366 case EQ: ccode = "z"; break; 367 case NE: ccode = "nz"; break; 368 case GE: ccode = "ge"; break; 369 case LT: ccode = "lt"; break; 370 case GEU: ccode = "nc"; break; 371 case LTU: ccode = "c"; break; 372 373 /* The missing codes above should never be generated. */ 374 default: 375 abort (); 376 } 377 378 switch (code) 379 { 380 case EQ: case NE: 381 { 382 int regnum; 383 384 if (GET_CODE (XEXP (op, 0)) != REG) 385 abort (); 386 387 regnum = REGNO (XEXP (op, 0)); 388 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]); 389 } 390 break; 391 392 case GE: case LT: case GEU: case LTU: 393 strcpy (prevop, "sbc %2,%3"); 394 break; 395 396 default: 397 abort (); 398 } 399 400 if (need_longbranch) 401 template = "%s | b%s .+6 | jmpf %s"; 402 else 403 template = "%s | b%s %s"; 404 sprintf (string, template, prevop, ccode, label); 405 406 return string; 407 } 408 409 /* Many machines have some registers that cannot be copied directly to or from 410 memory or even from other types of registers. An example is the `MQ' 411 register, which on most machines, can only be copied to or from general 412 registers, but not memory. Some machines allow copying all registers to and 413 from memory, but require a scratch register for stores to some memory 414 locations (e.g., those with symbolic address on the RT, and those with 415 certain symbolic address on the SPARC when compiling PIC). In some cases, 416 both an intermediate and a scratch register are required. 417 418 You should define these macros to indicate to the reload phase that it may 419 need to allocate at least one register for a reload in addition to the 420 register to contain the data. Specifically, if copying X to a register 421 CLASS in MODE requires an intermediate register, you should define 422 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of 423 whose registers can be used as intermediate registers or scratch registers. 424 425 If copying a register CLASS in MODE to X requires an intermediate or scratch 426 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the 427 largest register class required. If the requirements for input and output 428 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used 429 instead of defining both macros identically. 430 431 The values returned by these macros are often `GENERAL_REGS'. Return 432 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied 433 to or from a register of CLASS in MODE without requiring a scratch register. 434 Do not define this macro if it would always return `NO_REGS'. 435 436 If a scratch register is required (either with or without an intermediate 437 register), you should define patterns for `reload_inM' or `reload_outM', as 438 required.. These patterns, which will normally be implemented with a 439 `define_expand', should be similar to the `movM' patterns, except that 440 operand 2 is the scratch register. 441 442 Define constraints for the reload register and scratch register that contain 443 a single register class. If the original reload register (whose class is 444 CLASS) can meet the constraint given in the pattern, the value returned by 445 these macros is used for the class of the scratch register. Otherwise, two 446 additional reload registers are required. Their classes are obtained from 447 the constraints in the insn pattern. 
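   As a concrete example on this port: only the first eight registers
   can be moved directly to or from memory, so reloading a memory
   operand into a class that is not contained in EIGHT_REGS needs an
   EIGHT_REGS intermediate; similarly, reloading a PLUS that cannot be
   done with the inc or dec instructions needs the carry register
   (CARRY_REGS).  See xstormy16_secondary_reload_class below.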
448 449 X might be a pseudo-register or a `subreg' of a pseudo-register, which could 450 either be in a hard register or in memory. Use `true_regnum' to find out; 451 it will return -1 if the pseudo is in memory and the hard register number if 452 it is in a register. 453 454 These macros should not be used in the case where a particular class of 455 registers can only be copied to memory and not to another class of 456 registers. In that case, secondary reload registers are not needed and 457 would not be helpful. Instead, a stack location must be used to perform the 458 copy and the `movM' pattern should use memory as an intermediate storage. 459 This case often occurs between floating-point and general registers. */ 460 461 enum reg_class 462 xstormy16_secondary_reload_class (class, mode, x) 463 enum reg_class class; 464 enum machine_mode mode; 465 rtx x; 466 { 467 /* This chip has the interesting property that only the first eight 468 registers can be moved to/from memory. */ 469 if ((GET_CODE (x) == MEM 470 || ((GET_CODE (x) == SUBREG || GET_CODE (x) == REG) 471 && (true_regnum (x) == -1 472 || true_regnum (x) >= FIRST_PSEUDO_REGISTER))) 473 && ! reg_class_subset_p (class, EIGHT_REGS)) 474 return EIGHT_REGS; 475 476 /* When reloading a PLUS, the carry register will be required 477 unless the inc or dec instructions can be used. */ 478 if (xstormy16_carry_plus_operand (x, mode)) 479 return CARRY_REGS; 480 481 return NO_REGS; 482 } 483 484 /* Recognize a PLUS that needs the carry register. */ 485 int 486 xstormy16_carry_plus_operand (x, mode) 487 rtx x; 488 enum machine_mode mode ATTRIBUTE_UNUSED; 489 { 490 return (GET_CODE (x) == PLUS 491 && GET_CODE (XEXP (x, 1)) == CONST_INT 492 && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4)); 493 } 494 495 496 enum reg_class 497 xstormy16_preferred_reload_class (x, class) 498 enum reg_class class; 499 rtx x; 500 { 501 if (class == GENERAL_REGS 502 && GET_CODE (x) == MEM) 503 return EIGHT_REGS; 504 505 return class; 506 } 507 508 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \ 509 (GET_CODE (X) == CONST_INT \ 510 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096) 511 512 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \ 513 (GET_CODE (X) == CONST_INT \ 514 && INTVAL (X) + (OFFSET) >= 0 \ 515 && INTVAL (X) + (OFFSET) < 0x8000 \ 516 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00)) 517 518 int 519 xstormy16_legitimate_address_p (mode, x, strict) 520 enum machine_mode mode ATTRIBUTE_UNUSED; 521 rtx x; 522 int strict; 523 { 524 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)) 525 return 1; 526 527 if (GET_CODE (x) == PLUS 528 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)) 529 x = XEXP (x, 0); 530 531 if (GET_CODE (x) == POST_INC 532 || GET_CODE (x) == PRE_DEC) 533 x = XEXP (x, 0); 534 535 if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x)) 536 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER)) 537 return 1; 538 539 return 0; 540 } 541 542 /* Return nonzero if memory address X (an RTX) can have different 543 meanings depending on the machine mode of the memory reference it 544 is used for or if the address is valid for some modes but not 545 others. 546 547 Autoincrement and autodecrement addresses typically have mode-dependent 548 effects because the amount of the increment or decrement is the size of the 549 operand being addressed. Some machines have other mode-dependent addresses. 550 Many RISC machines have no mode-dependent addresses. 
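   As a worked example against the ranges accepted by
   LEGITIMATE_ADDRESS_CONST_INT_P above: the constant address 0x00FE is
   legitimate with an offset of 0, but 0x00FE + 6 = 0x0104 is not (it
   is neither below 0x100 nor at or above 0x7F00), so a DImode access
   at 0x00FE would be partly invalid and the address is reported as
   mode-dependent below.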
551 552 You may assume that ADDR is a valid address for the machine. 553 554 On this chip, this is true if the address is valid with an offset 555 of 0 but not of 6, because in that case it cannot be used as an 556 address for DImode or DFmode, or if the address is a post-increment 557 or pre-decrement address. */ 558 int 559 xstormy16_mode_dependent_address_p (x) 560 rtx x; 561 { 562 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0) 563 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6)) 564 return 1; 565 566 if (GET_CODE (x) == PLUS 567 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0) 568 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6)) 569 return 1; 570 571 if (GET_CODE (x) == PLUS) 572 x = XEXP (x, 0); 573 574 if (GET_CODE (x) == POST_INC 575 || GET_CODE (x) == PRE_DEC) 576 return 1; 577 578 return 0; 579 } 580 581 /* A C expression that defines the optional machine-dependent constraint 582 letters (`Q', `R', `S', `T', `U') that can be used to segregate specific 583 types of operands, usually memory references, for the target machine. 584 Normally this macro will not be defined. If it is required for a particular 585 target machine, it should return 1 if VALUE corresponds to the operand type 586 represented by the constraint letter C. If C is not defined as an extra 587 constraint, the value returned should be 0 regardless of VALUE. */ 588 int 589 xstormy16_extra_constraint_p (x, c) 590 rtx x; 591 int c; 592 { 593 switch (c) 594 { 595 /* 'Q' is for pushes. */ 596 case 'Q': 597 return (GET_CODE (x) == MEM 598 && GET_CODE (XEXP (x, 0)) == POST_INC 599 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx); 600 601 /* 'R' is for pops. */ 602 case 'R': 603 return (GET_CODE (x) == MEM 604 && GET_CODE (XEXP (x, 0)) == PRE_DEC 605 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx); 606 607 /* 'S' is for immediate memory addresses. */ 608 case 'S': 609 return (GET_CODE (x) == MEM 610 && GET_CODE (XEXP (x, 0)) == CONST_INT 611 && xstormy16_legitimate_address_p (VOIDmode, XEXP (x, 0), 0)); 612 613 /* 'T' is for Rx. */ 614 case 'T': 615 /* Not implemented yet. */ 616 return 0; 617 618 /* 'U' is for CONST_INT values not between 2 and 15 inclusive, 619 for allocating a scratch register for 32-bit shifts. */ 620 case 'U': 621 return (GET_CODE (x) == CONST_INT 622 && (INTVAL (x) < 2 || INTVAL (x) > 15)); 623 624 default: 625 return 0; 626 } 627 } 628 629 int 630 short_memory_operand (x, mode) 631 rtx x; 632 enum machine_mode mode; 633 { 634 if (! memory_operand (x, mode)) 635 return 0; 636 return (GET_CODE (XEXP (x, 0)) != PLUS); 637 } 638 639 int 640 nonimmediate_nonstack_operand (op, mode) 641 rtx op; 642 enum machine_mode mode; 643 { 644 /* 'Q' is for pushes, 'R' for pops. */ 645 return (nonimmediate_operand (op, mode) 646 && ! xstormy16_extra_constraint_p (op, 'Q') 647 && ! xstormy16_extra_constraint_p (op, 'R')); 648 } 649 650 /* Splitter for the 'move' patterns, for modes not directly implemeted 651 by hardware. Emit insns to copy a value of mode MODE from SRC to 652 DEST. 653 654 This function is only called when reload_completed. 655 */ 656 657 void 658 xstormy16_split_move (mode, dest, src) 659 enum machine_mode mode; 660 rtx dest; 661 rtx src; 662 { 663 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD; 664 int direction, end, i; 665 int src_modifies = 0; 666 int dest_modifies = 0; 667 int src_volatile = 0; 668 int dest_volatile = 0; 669 rtx mem_operand; 670 rtx auto_inc_reg_rtx = NULL_RTX; 671 672 /* Check initial conditions. */ 673 if (! reload_completed 674 || mode == QImode || mode == HImode 675 || ! 
nonimmediate_operand (dest, mode) 676 || ! general_operand (src, mode)) 677 abort (); 678 679 /* This case is not supported below, and shouldn't be generated. */ 680 if (GET_CODE (dest) == MEM 681 && GET_CODE (src) == MEM) 682 abort (); 683 684 /* This case is very very bad after reload, so trap it now. */ 685 if (GET_CODE (dest) == SUBREG 686 || GET_CODE (src) == SUBREG) 687 abort (); 688 689 /* The general idea is to copy by words, offsetting the source and 690 destination. Normally the least-significant word will be copied 691 first, but for pre-dec operations it's better to copy the 692 most-significant word first. Only one operand can be a pre-dec 693 or post-inc operand. 694 695 It's also possible that the copy overlaps so that the direction 696 must be reversed. */ 697 direction = 1; 698 699 if (GET_CODE (dest) == MEM) 700 { 701 mem_operand = XEXP (dest, 0); 702 dest_modifies = side_effects_p (mem_operand); 703 if (auto_inc_p (mem_operand)) 704 auto_inc_reg_rtx = XEXP (mem_operand, 0); 705 dest_volatile = MEM_VOLATILE_P (dest); 706 if (dest_volatile) 707 { 708 dest = copy_rtx (dest); 709 MEM_VOLATILE_P (dest) = 0; 710 } 711 } 712 else if (GET_CODE (src) == MEM) 713 { 714 mem_operand = XEXP (src, 0); 715 src_modifies = side_effects_p (mem_operand); 716 if (auto_inc_p (mem_operand)) 717 auto_inc_reg_rtx = XEXP (mem_operand, 0); 718 src_volatile = MEM_VOLATILE_P (src); 719 if (src_volatile) 720 { 721 src = copy_rtx (src); 722 MEM_VOLATILE_P (src) = 0; 723 } 724 } 725 else 726 mem_operand = NULL_RTX; 727 728 if (mem_operand == NULL_RTX) 729 { 730 if (GET_CODE (src) == REG 731 && GET_CODE (dest) == REG 732 && reg_overlap_mentioned_p (dest, src) 733 && REGNO (dest) > REGNO (src)) 734 direction = -1; 735 } 736 else if (GET_CODE (mem_operand) == PRE_DEC 737 || (GET_CODE (mem_operand) == PLUS 738 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC)) 739 direction = -1; 740 else if (GET_CODE (src) == MEM 741 && reg_overlap_mentioned_p (dest, src)) 742 { 743 int regno; 744 if (GET_CODE (dest) != REG) 745 abort (); 746 regno = REGNO (dest); 747 748 if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0)) 749 abort (); 750 751 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0)) 752 direction = -1; 753 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words, 754 mem_operand, 0)) 755 direction = 1; 756 else 757 /* This means something like 758 (set (reg:DI r0) (mem:DI (reg:HI r1))) 759 which we'd need to support by doing the set of the second word 760 last. */ 761 abort (); 762 } 763 764 end = direction < 0 ? -1 : num_words; 765 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction) 766 { 767 rtx w_src, w_dest, insn; 768 769 if (src_modifies) 770 w_src = gen_rtx_MEM (word_mode, mem_operand); 771 else 772 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD); 773 if (src_volatile) 774 MEM_VOLATILE_P (w_src) = 1; 775 if (dest_modifies) 776 w_dest = gen_rtx_MEM (word_mode, mem_operand); 777 else 778 w_dest = simplify_gen_subreg (word_mode, dest, mode, 779 i * UNITS_PER_WORD); 780 if (dest_volatile) 781 MEM_VOLATILE_P (w_dest) = 1; 782 783 /* The simplify_subreg calls must always be able to simplify. */ 784 if (GET_CODE (w_src) == SUBREG 785 || GET_CODE (w_dest) == SUBREG) 786 abort (); 787 788 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src)); 789 if (auto_inc_reg_rtx) 790 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, 791 auto_inc_reg_rtx, 792 REG_NOTES (insn)); 793 } 794 } 795 796 /* Expander for the 'move' patterns. 
Emit insns to copy a value of 797 mode MODE from SRC to DEST. */ 798 799 void 800 xstormy16_expand_move (mode, dest, src) 801 enum machine_mode mode; 802 rtx dest; 803 rtx src; 804 { 805 /* There are only limited immediate-to-memory move instructions. */ 806 if (! reload_in_progress 807 && ! reload_completed 808 && GET_CODE (dest) == MEM 809 && (GET_CODE (XEXP (dest, 0)) != CONST_INT 810 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0)) 811 && GET_CODE (src) != REG 812 && GET_CODE (src) != SUBREG) 813 src = copy_to_mode_reg (mode, src); 814 815 /* Don't emit something we would immediately split. */ 816 if (reload_completed 817 && mode != HImode && mode != QImode) 818 { 819 xstormy16_split_move (mode, dest, src); 820 return; 821 } 822 823 emit_insn (gen_rtx_SET (VOIDmode, dest, src)); 824 } 825 826 827 /* Stack Layout: 828 829 The stack is laid out as follows: 830 831 SP-> 832 FP-> Local variables 833 Register save area (up to 4 words) 834 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words) 835 836 AP-> Return address (two words) 837 9th procedure parameter word 838 10th procedure parameter word 839 ... 840 last procedure parameter word 841 842 The frame pointer location is tuned to make it most likely that all 843 parameters and local variables can be accessed using a load-indexed 844 instruction. */ 845 846 /* A structure to describe the layout. */ 847 struct xstormy16_stack_layout 848 { 849 /* Size of the topmost three items on the stack. */ 850 int locals_size; 851 int register_save_size; 852 int stdarg_save_size; 853 /* Sum of the above items. */ 854 int frame_size; 855 /* Various offsets. */ 856 int first_local_minus_ap; 857 int sp_minus_fp; 858 int fp_minus_ap; 859 }; 860 861 /* Does REGNO need to be saved? */ 862 #define REG_NEEDS_SAVE(REGNUM, IFUN) \ 863 ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \ 864 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \ 865 && (regs_ever_live[REGNUM] || ! current_function_is_leaf))) 866 867 /* Compute the stack layout. */ 868 struct xstormy16_stack_layout 869 xstormy16_compute_stack_layout () 870 { 871 struct xstormy16_stack_layout layout; 872 int regno; 873 const int ifun = xstormy16_interrupt_function_p (); 874 875 layout.locals_size = get_frame_size (); 876 877 layout.register_save_size = 0; 878 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 879 if (REG_NEEDS_SAVE (regno, ifun)) 880 layout.register_save_size += UNITS_PER_WORD; 881 882 if (current_function_stdarg) 883 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD; 884 else 885 layout.stdarg_save_size = 0; 886 887 layout.frame_size = (layout.locals_size 888 + layout.register_save_size 889 + layout.stdarg_save_size); 890 891 if (current_function_args_size <= 2048 && current_function_args_size != -1) 892 { 893 if (layout.frame_size + INCOMING_FRAME_SP_OFFSET 894 + current_function_args_size <= 2048) 895 layout.fp_minus_ap = layout.frame_size + INCOMING_FRAME_SP_OFFSET; 896 else 897 layout.fp_minus_ap = 2048 - current_function_args_size; 898 } 899 else 900 layout.fp_minus_ap = (layout.stdarg_save_size 901 + layout.register_save_size 902 + INCOMING_FRAME_SP_OFFSET); 903 layout.sp_minus_fp = (layout.frame_size + INCOMING_FRAME_SP_OFFSET 904 - layout.fp_minus_ap); 905 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size; 906 return layout; 907 } 908 909 /* Determine how all the special registers get eliminated. 
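   In terms of the layout computed above: eliminating the soft frame
   pointer to the hard frame pointer uses SP_MINUS_FP - LOCALS_SIZE,
   and to the stack pointer -LOCALS_SIZE; eliminating the argument
   pointer to the hard frame pointer uses -FP_MINUS_AP, and to the
   stack pointer -(SP_MINUS_FP + FP_MINUS_AP).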
*/ 910 int 911 xstormy16_initial_elimination_offset (from, to) 912 int from, to; 913 { 914 struct xstormy16_stack_layout layout; 915 int result; 916 917 layout = xstormy16_compute_stack_layout (); 918 919 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM) 920 result = layout.sp_minus_fp - layout.locals_size; 921 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM) 922 result = -layout.locals_size; 923 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM) 924 result = -layout.fp_minus_ap; 925 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM) 926 result = -(layout.sp_minus_fp + layout.fp_minus_ap); 927 else 928 abort (); 929 930 return result; 931 } 932 933 static rtx 934 emit_addhi3_postreload (dest, src0, src1) 935 rtx dest; 936 rtx src0; 937 rtx src1; 938 { 939 rtx set, clobber, insn; 940 941 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1)); 942 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, 16)); 943 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber))); 944 return insn; 945 } 946 947 /* Called after register allocation to add any instructions needed for 948 the prologue. Using a prologue insn is favored compared to putting 949 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro, 950 since it allows the scheduler to intermix instructions with the 951 saves of the caller saved registers. In some cases, it might be 952 necessary to emit a barrier instruction as the last insn to prevent 953 such scheduling. 954 955 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1 956 so that the debug info generation code can handle them properly. */ 957 void 958 xstormy16_expand_prologue () 959 { 960 struct xstormy16_stack_layout layout; 961 int regno; 962 rtx insn; 963 rtx mem_push_rtx; 964 rtx mem_fake_push_rtx; 965 const int ifun = xstormy16_interrupt_function_p (); 966 967 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx); 968 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx); 969 mem_fake_push_rtx = gen_rtx_PRE_INC (Pmode, stack_pointer_rtx); 970 mem_fake_push_rtx = gen_rtx_MEM (HImode, mem_fake_push_rtx); 971 972 layout = xstormy16_compute_stack_layout (); 973 974 /* Save the argument registers if necessary. */ 975 if (layout.stdarg_save_size) 976 for (regno = FIRST_ARGUMENT_REGISTER; 977 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS; 978 regno++) 979 { 980 rtx reg = gen_rtx_REG (HImode, regno); 981 insn = emit_move_insn (mem_push_rtx, reg); 982 RTX_FRAME_RELATED_P (insn) = 1; 983 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, 984 gen_rtx_SET (VOIDmode, 985 mem_fake_push_rtx, 986 reg), 987 REG_NOTES (insn)); 988 } 989 990 /* Push each of the registers to save. */ 991 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 992 if (REG_NEEDS_SAVE (regno, ifun)) 993 { 994 rtx reg = gen_rtx_REG (HImode, regno); 995 insn = emit_move_insn (mem_push_rtx, reg); 996 RTX_FRAME_RELATED_P (insn) = 1; 997 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, 998 gen_rtx_SET (VOIDmode, 999 mem_fake_push_rtx, 1000 reg), 1001 REG_NOTES (insn)); 1002 } 1003 1004 /* It's just possible that the SP here might be what we need for 1005 the new FP... */ 1006 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size) 1007 { 1008 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx); 1009 RTX_FRAME_RELATED_P (insn) = 1; 1010 } 1011 1012 /* Allocate space for local variables. 
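   Note that the adjustment below is positive: on this chip the stack
   grows upward (pushes above use post-increment addressing, pops in
   the epilogue use pre-decrement), so space is allocated by adding
   LAYOUT.LOCALS_SIZE to the stack pointer and released by subtracting
   it again in the epilogue.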
*/ 1013 if (layout.locals_size) 1014 { 1015 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx, 1016 GEN_INT (layout.locals_size)); 1017 RTX_FRAME_RELATED_P (insn) = 1; 1018 } 1019 1020 /* Set up the frame pointer, if required. */ 1021 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size) 1022 { 1023 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx); 1024 RTX_FRAME_RELATED_P (insn) = 1; 1025 if (layout.sp_minus_fp) 1026 { 1027 insn = emit_addhi3_postreload (hard_frame_pointer_rtx, 1028 hard_frame_pointer_rtx, 1029 GEN_INT (-layout.sp_minus_fp)); 1030 RTX_FRAME_RELATED_P (insn) = 1; 1031 } 1032 } 1033 } 1034 1035 /* Do we need an epilogue at all? */ 1036 int 1037 direct_return () 1038 { 1039 return (reload_completed 1040 && xstormy16_compute_stack_layout ().frame_size == 0); 1041 } 1042 1043 /* Called after register allocation to add any instructions needed for 1044 the epilogue. Using an epilogue insn is favored compared to putting 1045 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro, 1046 since it allows the scheduler to intermix instructions with the 1047 saves of the caller saved registers. In some cases, it might be 1048 necessary to emit a barrier instruction as the last insn to prevent 1049 such scheduling. */ 1050 1051 void 1052 xstormy16_expand_epilogue () 1053 { 1054 struct xstormy16_stack_layout layout; 1055 rtx mem_pop_rtx; 1056 int regno; 1057 const int ifun = xstormy16_interrupt_function_p (); 1058 1059 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx); 1060 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx); 1061 1062 layout = xstormy16_compute_stack_layout (); 1063 1064 /* Pop the stack for the locals. */ 1065 if (layout.locals_size) 1066 { 1067 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size) 1068 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx); 1069 else 1070 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx, 1071 GEN_INT (- layout.locals_size)); 1072 } 1073 1074 /* Restore any call-saved registers. */ 1075 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--) 1076 if (REG_NEEDS_SAVE (regno, ifun)) 1077 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx); 1078 1079 /* Pop the stack for the stdarg save area. */ 1080 if (layout.stdarg_save_size) 1081 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx, 1082 GEN_INT (- layout.stdarg_save_size)); 1083 1084 /* Return. */ 1085 if (ifun) 1086 emit_jump_insn (gen_return_internal_interrupt ()); 1087 else 1088 emit_jump_insn (gen_return_internal ()); 1089 } 1090 1091 int 1092 xstormy16_epilogue_uses (regno) 1093 int regno; 1094 { 1095 if (reload_completed && call_used_regs[regno]) 1096 { 1097 const int ifun = xstormy16_interrupt_function_p (); 1098 return REG_NEEDS_SAVE (regno, ifun); 1099 } 1100 return 0; 1101 } 1102 1103 /* Return an updated summarizer variable CUM to advance past an 1104 argument in the argument list. The values MODE, TYPE and NAMED 1105 describe that argument. Once this is done, the variable CUM is 1106 suitable for analyzing the *following* argument with 1107 `FUNCTION_ARG', etc. 1108 1109 This function need not do anything if the argument in question was 1110 passed on the stack. The compiler knows how to track the amount of 1111 stack space used for arguments without any special help. However, 1112 it makes life easier for xstormy16_build_va_list if it does update 1113 the word count. 
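   For example, if CUM is still below NUM_ARGUMENT_REGISTERS but CUM
   plus the size of the new argument would exceed it, the argument
   would otherwise straddle the register/stack boundary; it is
   therefore passed entirely on the stack, and CUM is first advanced
   to NUM_ARGUMENT_REGISTERS so that the count reflects this.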
*/ 1114 CUMULATIVE_ARGS 1115 xstormy16_function_arg_advance (cum, mode, type, named) 1116 CUMULATIVE_ARGS cum; 1117 enum machine_mode mode; 1118 tree type; 1119 int named ATTRIBUTE_UNUSED; 1120 { 1121 /* If an argument would otherwise be passed partially in registers, 1122 and partially on the stack, the whole of it is passed on the 1123 stack. */ 1124 if (cum < NUM_ARGUMENT_REGISTERS 1125 && cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS) 1126 cum = NUM_ARGUMENT_REGISTERS; 1127 1128 cum += XSTORMY16_WORD_SIZE (type, mode); 1129 1130 return cum; 1131 } 1132 1133 /* Do any needed setup for a variadic function. CUM has not been updated 1134 for the last named argument which has type TYPE and mode MODE. */ 1135 void 1136 xstormy16_setup_incoming_varargs (cum, int_mode, type, pretend_size) 1137 CUMULATIVE_ARGS cum ATTRIBUTE_UNUSED; 1138 int int_mode ATTRIBUTE_UNUSED; 1139 tree type ATTRIBUTE_UNUSED; 1140 int * pretend_size ATTRIBUTE_UNUSED; 1141 { 1142 } 1143 1144 /* Build the va_list type. 1145 1146 For this chip, va_list is a record containing a counter and a pointer. 1147 The counter is of type 'int' and indicates how many bytes 1148 have been used to date. The pointer indicates the stack position 1149 for arguments that have not been passed in registers. 1150 To keep the layout nice, the pointer is first in the structure. */ 1151 1152 tree 1153 xstormy16_build_va_list () 1154 { 1155 tree f_1, f_2, record, type_decl; 1156 1157 record = (*lang_hooks.types.make_type) (RECORD_TYPE); 1158 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record); 1159 1160 f_1 = build_decl (FIELD_DECL, get_identifier ("base"), 1161 ptr_type_node); 1162 f_2 = build_decl (FIELD_DECL, get_identifier ("count"), 1163 unsigned_type_node); 1164 1165 DECL_FIELD_CONTEXT (f_1) = record; 1166 DECL_FIELD_CONTEXT (f_2) = record; 1167 1168 TREE_CHAIN (record) = type_decl; 1169 TYPE_NAME (record) = type_decl; 1170 TYPE_FIELDS (record) = f_1; 1171 TREE_CHAIN (f_1) = f_2; 1172 1173 layout_type (record); 1174 1175 return record; 1176 } 1177 1178 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this 1179 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list 1180 variable to initialize. NEXTARG is the machine independent notion of the 1181 'next' argument after the variable arguments. */ 1182 void 1183 xstormy16_expand_builtin_va_start (valist, nextarg) 1184 tree valist; 1185 rtx nextarg ATTRIBUTE_UNUSED; 1186 { 1187 tree f_base, f_count; 1188 tree base, count; 1189 tree t; 1190 1191 if (xstormy16_interrupt_function_p ()) 1192 error ("cannot use va_start in interrupt function"); 1193 1194 f_base = TYPE_FIELDS (va_list_type_node); 1195 f_count = TREE_CHAIN (f_base); 1196 1197 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base); 1198 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count); 1199 1200 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx); 1201 t = build (PLUS_EXPR, TREE_TYPE (base), t, 1202 build_int_2 (INCOMING_FRAME_SP_OFFSET, 0)); 1203 t = build (MODIFY_EXPR, TREE_TYPE (base), base, t); 1204 TREE_SIDE_EFFECTS (t) = 1; 1205 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 1206 1207 t = build (MODIFY_EXPR, TREE_TYPE (count), count, 1208 build_int_2 (current_function_args_info * UNITS_PER_WORD, 0)); 1209 TREE_SIDE_EFFECTS (t) = 1; 1210 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 1211 } 1212 1213 /* Implement the stdarg/varargs va_arg macro. 
VALIST is the variable 1214 of type va_list as a tree, TYPE is the type passed to va_arg. 1215 Note: This algorithm is documented in stormy-abi. */ 1216 1217 rtx 1218 xstormy16_expand_builtin_va_arg (valist, type) 1219 tree valist; 1220 tree type; 1221 { 1222 tree f_base, f_count; 1223 tree base, count; 1224 rtx count_rtx, addr_rtx, r; 1225 rtx lab_gotaddr, lab_fromstack; 1226 tree t; 1227 int size, size_of_reg_args; 1228 tree size_tree, count_plus_size; 1229 rtx count_plus_size_rtx; 1230 1231 f_base = TYPE_FIELDS (va_list_type_node); 1232 f_count = TREE_CHAIN (f_base); 1233 1234 base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base); 1235 count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count); 1236 1237 size = PUSH_ROUNDING (int_size_in_bytes (type)); 1238 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD); 1239 1240 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD; 1241 1242 count_rtx = expand_expr (count, NULL_RTX, HImode, EXPAND_NORMAL); 1243 lab_gotaddr = gen_label_rtx (); 1244 lab_fromstack = gen_label_rtx (); 1245 addr_rtx = gen_reg_rtx (Pmode); 1246 1247 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree); 1248 count_plus_size_rtx = expand_expr (count_plus_size, NULL_RTX, HImode, EXPAND_NORMAL); 1249 emit_cmp_and_jump_insns (count_plus_size_rtx, GEN_INT (size_of_reg_args), 1250 GTU, const1_rtx, HImode, 1, lab_fromstack); 1251 1252 t = build (PLUS_EXPR, ptr_type_node, base, count); 1253 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL); 1254 if (r != addr_rtx) 1255 emit_move_insn (addr_rtx, r); 1256 1257 emit_jump_insn (gen_jump (lab_gotaddr)); 1258 emit_barrier (); 1259 emit_label (lab_fromstack); 1260 1261 /* Arguments larger than a word might need to skip over some 1262 registers, since arguments are either passed entirely in 1263 registers or entirely on the stack. */ 1264 if (size > 2 || size < 0) 1265 { 1266 rtx lab_notransition = gen_label_rtx (); 1267 emit_cmp_and_jump_insns (count_rtx, GEN_INT (NUM_ARGUMENT_REGISTERS 1268 * UNITS_PER_WORD), 1269 GEU, const1_rtx, HImode, 1, lab_notransition); 1270 1271 t = build (MODIFY_EXPR, TREE_TYPE (count), count, 1272 build_int_2 (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD, 0)); 1273 TREE_SIDE_EFFECTS (t) = 1; 1274 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 1275 1276 emit_label (lab_notransition); 1277 } 1278 1279 t = build (PLUS_EXPR, sizetype, size_tree, 1280 build_int_2 ((- NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD 1281 + INCOMING_FRAME_SP_OFFSET), 1282 -1)); 1283 t = build (PLUS_EXPR, TREE_TYPE (count), count, fold (t)); 1284 t = build (MINUS_EXPR, TREE_TYPE (base), base, t); 1285 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL); 1286 if (r != addr_rtx) 1287 emit_move_insn (addr_rtx, r); 1288 1289 emit_label (lab_gotaddr); 1290 1291 count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree); 1292 t = build (MODIFY_EXPR, TREE_TYPE (count), count, count_plus_size); 1293 TREE_SIDE_EFFECTS (t) = 1; 1294 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 1295 1296 return addr_rtx; 1297 } 1298 1299 /* Initialize the variable parts of a trampoline. ADDR is an RTX for 1300 the address of the trampoline; FNADDR is an RTX for the address of 1301 the nested function; STATIC_CHAIN is an RTX for the static chain 1302 value that should be passed to the function when it is called. 
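   The expander below writes four HImode words at ADDR: 0x3130 or'd
   with STATIC_CHAIN_REGNUM, the static chain value itself, 0x0200
   or'd with the low byte of FNADDR, and then the high byte of FNADDR.
   This appears to encode a move-immediate into the static chain
   register followed by a `jmpf' to FNADDR; the description here is a
   sketch of the code below, not of the instruction encodings.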
*/
void
xstormy16_initialize_trampoline (addr, fnaddr, static_chain)
     rtx addr;
     rtx fnaddr;
     rtx static_chain;
{
  rtx reg_addr = gen_reg_rtx (Pmode);
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr_mem;

  reg_addr_mem = gen_rtx_MEM (HImode, reg_addr);

  emit_move_insn (reg_addr, addr);
  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_move_insn (reg_fnaddr, fnaddr);
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}

/* Create an RTX representing the place where a function returns a
   value of data type VALTYPE.  VALTYPE is a tree node representing a
   data type.  Write `TYPE_MODE (VALTYPE)' to get the machine mode
   used to represent that type.  On many machines, only the mode is
   relevant.  (Actually, on most machines, scalar values are returned
   in the same place regardless of mode).

   If `PROMOTE_FUNCTION_RETURN' is defined, you must apply the same promotion
   rules specified in `PROMOTE_MODE' if VALTYPE is a scalar type.

   If the precise function being called is known, FUNC is a tree node
   (`FUNCTION_DECL') for it; otherwise, FUNC is a null pointer.  This makes it
   possible to use a different value-returning convention for specific
   functions when all their calls are known.

   `FUNCTION_VALUE' is not used for return values with aggregate data types,
   because these are returned in another way.  See `STRUCT_VALUE_REGNUM' and
   related macros.  */
rtx
xstormy16_function_value (valtype, func)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
{
  enum machine_mode mode;
  mode = TYPE_MODE (valtype);
  PROMOTE_MODE (mode, 0, valtype);
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.
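   On this port that amounts to a two-instruction thunk: an `add' of
   DELTA to the register holding the incoming `this' pointer (one
   register further along if a hidden struct-return pointer is
   present), followed by a `jmpf' to FUNCTION; see the fprintf calls
   in xstormy16_asm_output_mi_thunk below.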
This macro is responsible for 1380 emitting all of the code for a thunk function; 1381 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are 1382 not invoked. 1383 1384 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been 1385 extracted from it.) It might possibly be useful on some targets, but 1386 probably not. */ 1387 1388 static void 1389 xstormy16_asm_output_mi_thunk (file, thunk_fndecl, delta, 1390 vcall_offset, function) 1391 FILE *file; 1392 tree thunk_fndecl ATTRIBUTE_UNUSED; 1393 HOST_WIDE_INT delta; 1394 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED; 1395 tree function; 1396 { 1397 int regnum = FIRST_ARGUMENT_REGISTER; 1398 1399 /* There might be a hidden first argument for a returned structure. */ 1400 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))) 1401 regnum += 1; 1402 1403 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF); 1404 fputs ("\tjmpf ", file); 1405 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0)); 1406 putc ('\n', file); 1407 } 1408 1409 /* Mark functions with SYMBOL_REF_FLAG. */ 1410 1411 static void 1412 xstormy16_encode_section_info (decl, first) 1413 tree decl; 1414 int first ATTRIBUTE_UNUSED; 1415 { 1416 if (TREE_CODE (decl) == FUNCTION_DECL) 1417 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1; 1418 } 1419 1420 /* Output constructors and destructors. Just like 1421 default_named_section_asm_out_* but don't set the sections writable. */ 1422 #undef TARGET_ASM_CONSTRUCTOR 1423 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor 1424 #undef TARGET_ASM_DESTRUCTOR 1425 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor 1426 1427 static void 1428 xstormy16_asm_out_destructor (symbol, priority) 1429 rtx symbol; 1430 int priority; 1431 { 1432 const char *section = ".dtors"; 1433 char buf[16]; 1434 1435 /* ??? This only works reliably with the GNU linker. */ 1436 if (priority != DEFAULT_INIT_PRIORITY) 1437 { 1438 sprintf (buf, ".dtors.%.5u", 1439 /* Invert the numbering so the linker puts us in the proper 1440 order; constructors are run from right to left, and the 1441 linker sorts in increasing order. */ 1442 MAX_INIT_PRIORITY - priority); 1443 section = buf; 1444 } 1445 1446 named_section_flags (section, 0); 1447 assemble_align (POINTER_SIZE); 1448 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1); 1449 } 1450 1451 static void 1452 xstormy16_asm_out_constructor (symbol, priority) 1453 rtx symbol; 1454 int priority; 1455 { 1456 const char *section = ".ctors"; 1457 char buf[16]; 1458 1459 /* ??? This only works reliably with the GNU linker. */ 1460 if (priority != DEFAULT_INIT_PRIORITY) 1461 { 1462 sprintf (buf, ".ctors.%.5u", 1463 /* Invert the numbering so the linker puts us in the proper 1464 order; constructors are run from right to left, and the 1465 linker sorts in increasing order. */ 1466 MAX_INIT_PRIORITY - priority); 1467 section = buf; 1468 } 1469 1470 named_section_flags (section, 0); 1471 assemble_align (POINTER_SIZE); 1472 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1); 1473 } 1474 1475 /* Print a memory address as an operand to reference that memory location. */ 1476 void 1477 xstormy16_print_operand_address (file, address) 1478 FILE * file; 1479 rtx address; 1480 { 1481 HOST_WIDE_INT offset; 1482 int pre_dec, post_inc; 1483 1484 /* There are a few easy cases. 
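   Overall, the syntax emitted below is: a bare constant or symbol for
   an absolute address; otherwise a register in parentheses, prefixed
   with `--' for pre-decrement or suffixed with `++' for
   post-increment, and followed by `,offset' when there is a nonzero
   constant displacement, e.g. `(r5,12)' or `(--r7)' (the register
   names here are purely illustrative).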
*/ 1485 if (GET_CODE (address) == CONST_INT) 1486 { 1487 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF); 1488 return; 1489 } 1490 1491 if (CONSTANT_P (address) || GET_CODE (address) == CODE_LABEL) 1492 { 1493 output_addr_const (file, address); 1494 return; 1495 } 1496 1497 /* Otherwise, it's hopefully something of the form 1498 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)) 1499 */ 1500 1501 if (GET_CODE (address) == PLUS) 1502 { 1503 if (GET_CODE (XEXP (address, 1)) != CONST_INT) 1504 abort (); 1505 offset = INTVAL (XEXP (address, 1)); 1506 address = XEXP (address, 0); 1507 } 1508 else 1509 offset = 0; 1510 1511 pre_dec = (GET_CODE (address) == PRE_DEC); 1512 post_inc = (GET_CODE (address) == POST_INC); 1513 if (pre_dec || post_inc) 1514 address = XEXP (address, 0); 1515 1516 if (GET_CODE (address) != REG) 1517 abort (); 1518 1519 fputc ('(', file); 1520 if (pre_dec) 1521 fputs ("--", file); 1522 fputs (reg_names [REGNO (address)], file); 1523 if (post_inc) 1524 fputs ("++", file); 1525 if (offset != 0) 1526 { 1527 fputc (',', file); 1528 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset); 1529 } 1530 fputc (')', file); 1531 } 1532 1533 /* Print an operand to an assembler instruction. */ 1534 void 1535 xstormy16_print_operand (file, x, code) 1536 FILE * file; 1537 rtx x; 1538 int code; 1539 { 1540 switch (code) 1541 { 1542 case 'B': 1543 /* There is either one bit set, or one bit clear, in X. 1544 Print it preceded by '#'. */ 1545 { 1546 HOST_WIDE_INT xx = 1; 1547 HOST_WIDE_INT l; 1548 1549 if (GET_CODE (x) == CONST_INT) 1550 xx = INTVAL (x); 1551 else 1552 output_operand_lossage ("`B' operand is not constant"); 1553 1554 l = exact_log2 (xx); 1555 if (l == -1) 1556 l = exact_log2 (~xx); 1557 if (l == -1) 1558 output_operand_lossage ("`B' operand has multiple bits set"); 1559 1560 fputs (IMMEDIATE_PREFIX, file); 1561 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l); 1562 return; 1563 } 1564 1565 case 'C': 1566 /* Print the symbol without a surrounding @fptr(). */ 1567 if (GET_CODE (x) == SYMBOL_REF) 1568 assemble_name (file, XSTR (x, 0)); 1569 else if (GET_CODE (x) == LABEL_REF) 1570 output_asm_label (x); 1571 else 1572 xstormy16_print_operand_address (file, x); 1573 return; 1574 1575 case 'o': 1576 case 'O': 1577 /* Print the immediate operand less one, preceded by '#'. 1578 For 'O', negate it first. */ 1579 { 1580 HOST_WIDE_INT xx = 0; 1581 1582 if (GET_CODE (x) == CONST_INT) 1583 xx = INTVAL (x); 1584 else 1585 output_operand_lossage ("`o' operand is not constant"); 1586 1587 if (code == 'O') 1588 xx = -xx; 1589 1590 fputs (IMMEDIATE_PREFIX, file); 1591 fprintf (file, HOST_WIDE_INT_PRINT_DEC, xx - 1); 1592 return; 1593 } 1594 1595 case 0: 1596 /* Handled below. */ 1597 break; 1598 1599 default: 1600 output_operand_lossage ("xstormy16_print_operand: unknown code"); 1601 return; 1602 } 1603 1604 switch (GET_CODE (x)) 1605 { 1606 case REG: 1607 fputs (reg_names [REGNO (x)], file); 1608 break; 1609 1610 case MEM: 1611 xstormy16_print_operand_address (file, XEXP (x, 0)); 1612 break; 1613 1614 default: 1615 /* Some kind of constant or label; an immediate operand, 1616 so prefix it with '#' for the assembler. */ 1617 fputs (IMMEDIATE_PREFIX, file); 1618 output_addr_const (file, x); 1619 break; 1620 } 1621 1622 return; 1623 } 1624 1625 1626 /* Expander for the `casesi' pattern. 1627 INDEX is the index of the switch statement. 1628 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding 1629 to the first table entry. 1630 RANGE is the number of table entries. 
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
   range LOWER_BOUND to LOWER_BOUND+RANGE-1.
*/

void
xstormy16_expand_casesi (index, lower_bound, range, table, default_label)
     rtx index;
     rtx lower_bound;
     rtx range;
     rtx table;
     rtx default_label;
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
	   (unsigned long) range_i);

  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, GEN_INT (2)));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}

/* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
   instructions, without label or alignment or any other special
   constructs.  We know that the previous instruction will be the
   `tablejump_pcrel' output above.

   TODO: it might be nice to output 'br' instructions if they could
   all reach.  */

void
xstormy16_output_addr_vec (file, label, table)
     FILE *file;
     rtx label ATTRIBUTE_UNUSED;
     rtx table;
{
  int vlen, idx;

  function_section (current_function_decl);

  vlen = XVECLEN (table, 0);
  for (idx = 0; idx < vlen; idx++)
    {
      fputs ("\tjmpf ", file);
      output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
      fputc ('\n', file);
    }
}


/* Expander for the `call' patterns.
   RETVAL is the RTX for the return value, or NULL for a call that
   returns no value.
   DEST is the MEM operand of the call pattern; its address is the
   function to be called.
   COUNTER is the remaining operand of the call pattern, passed
   through unchanged as the second operand of the CALL rtx.  */

void
xstormy16_expand_call (retval, dest, counter)
     rtx retval;
     rtx dest;
     rtx counter;
{
  rtx call, temp;
  enum machine_mode mode;

  if (GET_CODE (dest) != MEM)
    abort ();
  dest = XEXP (dest, 0);

  if (! CONSTANT_P (dest)
      && GET_CODE (dest) != REG)
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}

/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   using CARRY as a temporary.  When CODE is COMPARE, a branch
   template is generated (this saves duplicating code in
   xstormy16_split_cbranch).
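   For example, an SImode PLUS becomes an `addchi4' for the low word,
   producing the carry, followed by an `addchi5' for the high word,
   which takes the carry as an additional input; MINUS uses the
   matching subchi4/subchi5 patterns.  For COMPARE the final word is
   emitted as a branch template with a dummy condition and label,
   which xstormy16_split_cbranch later fills in.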
*/ 1749 1750 void 1751 xstormy16_expand_arith (mode, code, dest, src0, src1, carry) 1752 enum machine_mode mode; 1753 enum rtx_code code; 1754 rtx dest; 1755 rtx src0; 1756 rtx src1; 1757 rtx carry; 1758 { 1759 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD; 1760 int i; 1761 int firstloop = 1; 1762 1763 if (code == NEG) 1764 { 1765 rtx zero_reg = gen_reg_rtx (word_mode); 1766 emit_move_insn (zero_reg, src0); 1767 src0 = zero_reg; 1768 } 1769 1770 for (i = 0; i < num_words; i++) 1771 { 1772 rtx w_src0, w_src1, w_dest; 1773 rtx insn; 1774 1775 if (code == NEG) 1776 w_src0 = src0; 1777 else 1778 w_src0 = simplify_gen_subreg (word_mode, src0, mode, 1779 i * UNITS_PER_WORD); 1780 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD); 1781 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD); 1782 1783 switch (code) 1784 { 1785 case PLUS: 1786 if (firstloop 1787 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0) 1788 continue; 1789 1790 if (firstloop) 1791 insn = gen_addchi4 (w_dest, w_src0, w_src1, carry); 1792 else 1793 insn = gen_addchi5 (w_dest, w_src0, w_src1, carry, carry); 1794 break; 1795 1796 case NEG: 1797 case MINUS: 1798 case COMPARE: 1799 if (code == COMPARE && i == num_words - 1) 1800 { 1801 rtx branch, sub, clobber, sub_1; 1802 1803 sub_1 = gen_rtx_MINUS (HImode, w_src0, 1804 gen_rtx_ZERO_EXTEND (HImode, carry)); 1805 sub = gen_rtx_SET (VOIDmode, w_dest, 1806 gen_rtx_MINUS (HImode, sub_1, w_src1)); 1807 clobber = gen_rtx_CLOBBER (VOIDmode, carry); 1808 branch = gen_rtx_SET (VOIDmode, pc_rtx, 1809 gen_rtx_IF_THEN_ELSE (VOIDmode, 1810 gen_rtx_EQ (HImode, 1811 sub_1, 1812 w_src1), 1813 pc_rtx, 1814 pc_rtx)); 1815 insn = gen_rtx_PARALLEL (VOIDmode, 1816 gen_rtvec (3, branch, sub, clobber)); 1817 } 1818 else if (firstloop 1819 && code != COMPARE 1820 && GET_CODE (w_src1) == CONST_INT && INTVAL (w_src1) == 0) 1821 continue; 1822 else if (firstloop) 1823 insn = gen_subchi4 (w_dest, w_src0, w_src1, carry); 1824 else 1825 insn = gen_subchi5 (w_dest, w_src0, w_src1, carry, carry); 1826 break; 1827 1828 case IOR: 1829 case XOR: 1830 case AND: 1831 if (GET_CODE (w_src1) == CONST_INT 1832 && INTVAL (w_src1) == -(code == AND)) 1833 continue; 1834 1835 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx (code, mode, 1836 w_src0, w_src1)); 1837 break; 1838 1839 case NOT: 1840 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0)); 1841 break; 1842 1843 default: 1844 abort (); 1845 } 1846 1847 firstloop = 0; 1848 emit (insn); 1849 } 1850 } 1851 1852 /* Return 1 if OP is a shift operator. */ 1853 1854 int 1855 shift_operator (op, mode) 1856 register rtx op; 1857 enum machine_mode mode ATTRIBUTE_UNUSED; 1858 { 1859 enum rtx_code code = GET_CODE (op); 1860 1861 return (code == ASHIFT 1862 || code == ASHIFTRT 1863 || code == LSHIFTRT); 1864 } 1865 1866 /* The shift operations are split at output time for constant values; 1867 variable-width shifts get handed off to a library routine. 1868 1869 Generate an output string to do (set X (CODE:MODE X SIZE_R)) 1870 SIZE_R will be a CONST_INT, X will be a hard register. 
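   For instance, with SIZE 4, X in r0/r1 and TEMP in r7 (an
   illustrative register choice), the ASHIFT case below produces

	mov r7,r0 | shl r0,#4 | shl r1,#4 | shr r7,#12 | or r1,r7

   i.e. the saved copy of the low word supplies the bits shifted into
   the high word.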

/* Return 1 if OP is a shift operator.  */

int
shift_operator (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (op);

  return (code == ASHIFT
	  || code == ASHIFTRT
	  || code == LSHIFTRT);
}

/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.  */

const char *
xstormy16_output_shift (mode, code, x, size_r, temp)
     enum machine_mode mode;
     enum rtx_code code;
     rtx x;
     rtx size_r;
     rtx temp;
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  static char r[64];

  if (GET_CODE (size_r) != CONST_INT
      || GET_CODE (x) != REG
      || mode != SImode)
    abort ();
  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  abort ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  if (size == 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  abort ();
	}
      return r;
    }
  if (size > 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  abort ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  Note that (16 - size) is parenthesised and cast
     so that an int, not a HOST_WIDE_INT, is passed for the %d.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    default:
      abort ();
    }
  return r;
}
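
/* Example outputs (illustrative only; assume X lives in r2/r3 with the
   low word in r2 and TEMP in r7, names that depend on register
   allocation):

	ASHIFT by 1:   shl r2,#1 | rlc r3,#1
	ASHIFT by 16:  mov r3,r2 | mov r2,#0
	ASHIFT by 20:  mov r3,r2 | mov r2,#0 | shl r3,#4
	ASHIFT by 4:   mov r7,r2 | shl r2,#4 | shl r3,#4 | shr r7,#12 | or r3,r7

   The '|' characters come straight from the templates above and are
   assumed to act as statement separators for this assembler.  */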

/* Attribute handling.  */

/* Return nonzero if the function is an interrupt function.  */

int
xstormy16_interrupt_function_p ()
{
  tree attributes;

  /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
     any functions are declared, which is demonstrably wrong, but
     it is worked around here.  FIXME.  */
  if (!cfun)
    return 0;

  attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  return lookup_attribute ("interrupt", attributes) != NULL_TREE;
}

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
static tree xstormy16_handle_interrupt_attribute
  PARAMS ((tree *, tree, tree, int, bool *));
static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 0, 0, false, true,  true,  xstormy16_handle_interrupt_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};

/* Handle an "interrupt" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
xstormy16_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
     tree *node;
     tree name;
     tree args ATTRIBUTE_UNUSED;
     int flags ATTRIBUTE_UNUSED;
     bool *no_add_attrs;
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning ("`%s' attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

static struct {
  const char *name;
  int md_code;
  const char *arg_ops;   /* 0..9, t for temp register, r for return value.  */
  const char *arg_types; /* s=short, l=long, upper case for unsigned.  */
} s16builtins[] = {
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { 0, 0, 0, 0 }
};

static void
xstormy16_init_builtins ()
{
  tree args, ret_type, arg;
  int i, a;

  ret_type = void_type_node;

  for (i = 0; s16builtins[i].name; i++)
    {
      args = void_list_node;
      for (a = strlen (s16builtins[i].arg_types) - 1; a >= 0; a--)
	{
	  switch (s16builtins[i].arg_types[a])
	    {
	    case 's': arg = short_integer_type_node; break;
	    case 'S': arg = short_unsigned_type_node; break;
	    case 'l': arg = long_integer_type_node; break;
	    case 'L': arg = long_unsigned_type_node; break;
	    default: abort ();
	    }
	  if (a == 0)
	    ret_type = arg;
	  else
	    args = tree_cons (NULL_TREE, arg, args);
	}
      builtin_function (s16builtins[i].name,
			build_function_type (ret_type, args),
			i, BUILT_IN_MD, NULL, NULL);
    }
}
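
#if 0
/* Illustration only, never compiled: how the builtins registered above
   appear from C code on this target.  The prototypes follow the
   arg_types strings, e.g. "sls" gives short __sdivlh (long, short);
   the upper-case "SLS" entries are the unsigned counterparts.
   __sdivlh and __smodlh expand to the same `sdivlh' pattern and differ
   only in which operand is handed back as the result (arg_ops "rt01"
   versus "tr01").  */
static short
example_remainder (long num, short den)
{
  return __smodlh (num, den);
}
#endif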

static rtx
xstormy16_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_expr (TREE_VALUE (argtree), NULL_RTX, VOIDmode, 0);
      argtree = TREE_CHAIN (argtree);
    }

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      int omode;

      copyto[o] = 0;

      omode = insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}


#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

struct gcc_target targetm = TARGET_INITIALIZER;
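
/* Note (illustrative): with the TARGET_ASM_ALIGNED_HI_OP and
   TARGET_ASM_ALIGNED_SI_OP strings above, aligned 16- and 32-bit data
   items come out as assembler directives of the form

	.hword	1234
	.word	123456

   (the values shown are arbitrary).  */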