;;- Machine description for ARM for GNU compiler
;; Copyright (C) 1991-2021 Free Software Foundation, Inc.
;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
;; and Martin Simmons (@harleqn.co.uk).
;; More major hacks by Richard Earnshaw (rearnsha@arm.com).

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published
;; by the Free Software Foundation; either version 3, or (at your
;; option) any later version.

;; GCC is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
;; License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.


;;---------------------------------------------------------------------------
;; Constants

;; Register numbers -- All machine registers should be defined here
(define_constants
  [(R0_REGNUM         0)	; First CORE register
   (R1_REGNUM         1)	; Second CORE register
   (R4_REGNUM         4)	; Fifth CORE register
   (FDPIC_REGNUM      9)	; FDPIC register
   (IP_REGNUM        12)	; Scratch register
   (SP_REGNUM        13)	; Stack pointer
   (LR_REGNUM        14)	; Return address register
   (PC_REGNUM        15)	; Program counter
   (LAST_ARM_REGNUM  15)	;
   (CC_REGNUM       100)	; Condition code pseudo register
   (VFPCC_REGNUM    101)	; VFP Condition code pseudo register
   (APSRQ_REGNUM    104)	; Q bit pseudo register
   (APSRGE_REGNUM   105)	; GE bits pseudo register
   (VPR_REGNUM      106)	; Vector Predication Register - MVE register.
  ]
)
;; 3rd operand to select_dominance_cc_mode
(define_constants
  [(DOM_CC_X_AND_Y  0)
   (DOM_CC_NX_OR_Y  1)
   (DOM_CC_X_OR_Y   2)
  ]
)
;; conditional compare combination
(define_constants
  [(CMP_CMP 0)
   (CMN_CMP 1)
   (CMP_CMN 2)
   (CMN_CMN 3)
   (NUM_OF_COND_CMP 4)
  ]
)


;;---------------------------------------------------------------------------
;; Attributes

;; Processor type.  This is created automatically from arm-cores.def.
(include "arm-tune.md")

;; Instruction classification types
(include "types.md")

; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
; generating ARM code.  This is used to control the length of some insn
; patterns that share the same RTL in both ARM and Thumb code.
(define_attr "is_thumb" "yes,no"
  (const (if_then_else (symbol_ref "TARGET_THUMB")
	 (const_string "yes") (const_string "no"))))

; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
(define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))

; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
(define_attr "is_thumb1" "yes,no"
  (const (if_then_else (symbol_ref "TARGET_THUMB1")
	 (const_string "yes") (const_string "no"))))

; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
; The arm_restrict_it flag enables the "short IT" feature which
; restricts IT blocks to a single 16-bit instruction.
; This attribute should only be used on 16-bit Thumb-2 instructions
; which may be predicated (the "predicable" attribute must be set).
(define_attr "predicable_short_it" "no,yes" (const_string "no"))

; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
; This attribute should only be used on instructions which may emit
; an IT block in their expansion which is not a short IT.
(define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))

; Mark an instruction sequence as the required way of loading a
; constant when -mpure-code is enabled (which implies
; arm_disable_literal_pool)
(define_attr "required_for_purecode" "no,yes" (const_string "no"))

;; Operand number of an input operand that is shifted.  Zero if the
;; given instruction does not shift one of its input operands.
(define_attr "shift" "" (const_int 0))

;; [For compatibility with AArch64 in pipeline models]
;; Attribute that specifies whether or not the instruction touches fp
;; registers.
(define_attr "fp" "no,yes" (const_string "no"))

; Floating Point Unit.  If we only have floating point emulation, then there
; is no point in scheduling the floating point insns.  (Well, for best
; performance we should try and group them together).
(define_attr "fpu" "none,vfp"
  (const (symbol_ref "arm_fpu_attr")))

; Predicated means that the insn form is conditionally executed based on a
; predicate.  We default to 'no' because no Thumb patterns match this rule
; and not all ARM insns do.
(define_attr "predicated" "yes,no" (const_string "no"))

; LENGTH of an instruction (in bytes)
(define_attr "length" ""
  (const_int 4))

; The architecture which supports the instruction (or alternative).
; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode.  "v6"
; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
; arm_arch6.  "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
; Baseline.  This attribute is used to compute attribute "enabled",
; use type "any" to enable an alternative in all cases.
(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon,mve"
  (const_string "any"))

(define_attr "arch_enabled" "no,yes"
  (cond [(eq_attr "arch" "any")
	 (const_string "yes")

	 (and (eq_attr "arch" "a")
	      (match_test "TARGET_ARM"))
	 (const_string "yes")

	 (and (eq_attr "arch" "t")
	      (match_test "TARGET_THUMB"))
	 (const_string "yes")

	 (and (eq_attr "arch" "t1")
	      (match_test "TARGET_THUMB1"))
	 (const_string "yes")

	 (and (eq_attr "arch" "t2")
	      (match_test "TARGET_THUMB2"))
	 (const_string "yes")

	 (and (eq_attr "arch" "32")
	      (match_test "TARGET_32BIT"))
	 (const_string "yes")

	 (and (eq_attr "arch" "v6")
	      (match_test "TARGET_32BIT && arm_arch6"))
	 (const_string "yes")

	 (and (eq_attr "arch" "nov6")
	      (match_test "TARGET_32BIT && !arm_arch6"))
	 (const_string "yes")

	 (and (eq_attr "arch" "v6t2")
	      (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
	 (const_string "yes")

	 (and (eq_attr "arch" "v8mb")
	      (match_test "TARGET_THUMB1 && arm_arch8"))
	 (const_string "yes")

	 (and (eq_attr "arch" "iwmmxt2")
	      (match_test "TARGET_REALLY_IWMMXT2"))
	 (const_string "yes")

	 (and (eq_attr "arch" "armv6_or_vfpv3")
	      (match_test "arm_arch6 || TARGET_VFP3"))
	 (const_string "yes")

	 (and (eq_attr "arch" "neon")
	      (match_test "TARGET_NEON"))
	 (const_string "yes")

	 (and (eq_attr "arch" "mve")
	      (match_test "TARGET_HAVE_MVE"))
	 (const_string "yes")
	]

	(const_string "no")))

(define_attr "opt" "any,speed,size"
  (const_string "any"))

(define_attr "opt_enabled" "no,yes"
  (cond [(eq_attr "opt" "any")
	 (const_string "yes")

	 (and (eq_attr "opt" "speed")
	      (match_test "optimize_function_for_speed_p (cfun)"))
	 (const_string "yes")

	 (and (eq_attr "opt" "size")
	      (match_test "optimize_function_for_size_p (cfun)"))
	 (const_string "yes")]
	(const_string "no")))

(define_attr "use_literal_pool" "no,yes"
  (cond [(and (eq_attr "type" "f_loads,f_loadd")
	      (match_test "CONSTANT_P (operands[1])"))
	 (const_string "yes")]
	(const_string "no")))

; Enable all alternatives that are both arch_enabled and insn_enabled.
; FIXME:: opt_enabled has been temporarily removed till the time we have
; an attribute that allows the use of such alternatives.
; This depends on caching of speed_p, size_p on a per
; alternative basis.  The problem is that the enabled attribute
; cannot depend on any state that is not cached or is not constant
; for a compilation unit.  We probably need a generic "hot/cold"
; alternative which if implemented can help with this.  We disable this
; until such a time as this is implemented and / or the improvements or
; regressions with removing this attribute are double checked.
; See ashldi3_neon and <shift>di3_neon in neon.md.

 (define_attr "enabled" "no,yes"
   (cond [(and (eq_attr "predicable_short_it" "no")
	       (and (eq_attr "predicated" "yes")
		    (match_test "arm_restrict_it")))
	  (const_string "no")

	  (and (eq_attr "enabled_for_short_it" "no")
	       (match_test "arm_restrict_it"))
	  (const_string "no")

	  (and (eq_attr "required_for_purecode" "yes")
	       (not (match_test "arm_disable_literal_pool")))
	  (const_string "no")

	  (eq_attr "arch_enabled" "no")
	  (const_string "no")]
	 (const_string "yes")))

; POOL_RANGE is how far away from a constant pool entry that this insn
; can be placed.  If the distance is zero, then this insn will never
; reference the pool.
; Note that for Thumb constant pools the PC value is rounded down to the
; nearest multiple of four.  Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
; Thumb insns) should be set to <max_range> - 2.
; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
; before its address.
; It is set to <max_range> - (8 + <data_size>).
(define_attr "arm_pool_range" "" (const_int 0))
(define_attr "thumb2_pool_range" "" (const_int 0))
(define_attr "arm_neg_pool_range" "" (const_int 0))
(define_attr "thumb2_neg_pool_range" "" (const_int 0))

(define_attr "pool_range" ""
  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
	(attr "arm_pool_range")))
(define_attr "neg_pool_range" ""
  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
	(attr "arm_neg_pool_range")))

; An assembler sequence may clobber the condition codes without us knowing.
; If such an insn references the pool, then we have no way of knowing how,
; so use the most conservative value for pool_range.
(define_asm_attributes
 [(set_attr "conds" "clob")
  (set_attr "length" "4")
  (set_attr "pool_range" "250")])

; Load scheduling, set from the arm_ld_sched variable
; initialized by arm_option_override()
(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))

; condition codes: this one is used by final_prescan_insn to speed up
; conditionalizing instructions.  It saves having to scan the rtl to see if
; it uses or alters the condition codes.
;
; USE means that the condition codes are used by the insn in the process of
; outputting code, this means (at present) that we can't use the insn in
; inlined branches
;
; SET means that the purpose of the insn is to set the condition codes in a
; well defined manner.
;
; CLOB means that the condition codes are altered in an undefined manner, if
; they are altered at all
;
; UNCONDITIONAL means the instruction cannot be conditionally executed and
; that the instruction does not use or alter the condition codes.
;
; NOCOND means that the instruction does not use or alter the condition
; codes but can be converted into a conditionally executed instruction.

(define_attr "conds" "use,set,clob,unconditional,nocond"
	(if_then_else
	 (ior (eq_attr "is_thumb1" "yes")
	      (eq_attr "type" "call"))
	 (const_string "clob")
	 (if_then_else
	  (ior (eq_attr "is_neon_type" "yes")
	       (eq_attr "is_mve_type" "yes"))
	  (const_string "unconditional")
	  (const_string "nocond"))))

; Predicable means that the insn can be conditionally executed based on
; an automatically added predicate (additional patterns are generated by
; gen...).  We default to 'no' because no Thumb patterns match this rule
; and not all ARM patterns do.
(define_attr "predicable" "no,yes" (const_string "no"))

; Only model the write buffer for ARM6 and ARM7.  Earlier processors don't
; have one.  Later ones, such as StrongARM, have write-back caches, so don't
; suffer blockages enough to warrant modelling this (and it can adversely
; affect the schedule).
(define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))

; WRITE_CONFLICT implies that a read following an unrelated write is likely
; to stall the processor.  Used with model_wbuf above.
(define_attr "write_conflict" "no,yes"
  (if_then_else (eq_attr "type"
		 "block,call,load_4")
		(const_string "yes")
		(const_string "no")))

; Classify the insns into those that take one cycle and those that take more
; than one on the main cpu execution unit.
(define_attr "core_cycles" "single,multi"
  (if_then_else (eq_attr "type"
    "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
     alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
     alus_ext, alus_imm, alus_sreg,\
     alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
     logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
     logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
     wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
     wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
     wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
     wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
     wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
     wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
     wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
     wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
     wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
     wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
     wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
   (const_string "single")
   (const_string "multi")))

;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
;; distant label.  Only applicable to Thumb code.
(define_attr "far_jump" "yes,no" (const_string "no"))


;; The number of machine instructions this pattern expands to.
;; Used for Thumb-2 conditional execution.
(define_attr "ce_count" "" (const_int 1))

;;---------------------------------------------------------------------------
;; Unspecs

(include "unspecs.md")

;;---------------------------------------------------------------------------
;; Mode iterators

(include "iterators.md")

;;---------------------------------------------------------------------------
;; Predicates

(include "predicates.md")
(include "constraints.md")

;;---------------------------------------------------------------------------
;; Pipeline descriptions

(define_attr "tune_cortexr4" "yes,no"
  (const (if_then_else
	  (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
	  (const_string "yes")
	  (const_string "no"))))

;; True if the generic scheduling description should be used.

(define_attr "generic_sched" "yes,no"
  (const (if_then_else
	  (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
				arm926ejs,arm10e,arm1026ejs,arm1136js,\
				arm1136jfs,cortexa5,cortexa7,cortexa8,\
				cortexa9,cortexa12,cortexa15,cortexa17,\
				cortexa53,cortexa57,cortexm4,cortexm7,\
				exynosm1,marvell_pj4,xgene1")
	       (eq_attr "tune_cortexr4" "yes"))
	  (const_string "no")
	  (const_string "yes"))))

(define_attr "generic_vfp" "yes,no"
  (const (if_then_else
	  (and (eq_attr "fpu" "vfp")
	       (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
				cortexa8,cortexa9,cortexa53,cortexm4,\
				cortexm7,marvell_pj4,xgene1")
	       (eq_attr "tune_cortexr4" "no"))
	  (const_string "yes")
	  (const_string "no"))))

(include "marvell-f-iwmmxt.md")
(include "arm-generic.md")
(include "arm926ejs.md")
(include "arm1020e.md")
(include "arm1026ejs.md")
(include "arm1136jfs.md")
(include "fa526.md")
(include "fa606te.md")
(include "fa626te.md")
(include "fmp626.md")
(include "fa726te.md")
(include "cortex-a5.md")
(include "cortex-a7.md")
(include "cortex-a8.md")
(include "cortex-a9.md")
(include "cortex-a15.md")
(include "cortex-a17.md")
(include "cortex-a53.md")
(include "cortex-a57.md")
(include "cortex-r4.md")
(include "cortex-r4f.md")
(include "cortex-m7.md")
(include "cortex-m4.md")
(include "cortex-m4-fpu.md")
(include "exynos-m1.md")
(include "vfp11.md")
(include "marvell-pj4.md")
(include "xgene1.md")

;; define_subst and associated attributes

(define_subst "add_setq"
  [(set (match_operand:SI 0 "" "")
	(match_operand:SI 1 "" ""))]
  ""
  [(set (match_dup 0)
	(match_dup 1))
   (set (reg:CC APSRQ_REGNUM)
	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])

(define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
(define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
		   "ARM_Q_BIT_READ")

;;---------------------------------------------------------------------------
;; Insn patterns
;;
;; Addition insns.

;; Note: For DImode insns, there is normally no reason why operands should
;; not be in the same register, what we don't want is for something being
;; written to partially overlap something that is an input.

(define_expand "adddi3"
 [(parallel
   [(set (match_operand:DI           0 "s_register_operand")
	 (plus:DI (match_operand:DI 1 "s_register_operand")
		  (match_operand:DI 2 "reg_or_int_operand")))
    (clobber (reg:CC CC_REGNUM))])]
  "TARGET_EITHER"
  "
  if (TARGET_THUMB1)
    {
      if (!REG_P (operands[2]))
	operands[2] = force_reg (DImode, operands[2]);
    }
  else
    {
      rtx lo_result, hi_result, lo_dest, hi_dest;
      rtx lo_op1, hi_op1, lo_op2, hi_op2;
      arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
			      &lo_op2, &hi_op2);
      lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
      hi_result = hi_dest = gen_highpart (SImode, operands[0]);

      if (lo_op2 == const0_rtx)
	{
	  lo_dest = lo_op1;
	  if (!arm_add_operand (hi_op2, SImode))
	    hi_op2 = force_reg (SImode, hi_op2);
	  /* Assume hi_op2 won't also be zero.  */
	  emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
	}
      else
	{
	  if (!arm_add_operand (lo_op2, SImode))
	    lo_op2 = force_reg (SImode, lo_op2);
	  if (!arm_not_operand (hi_op2, SImode))
	    hi_op2 = force_reg (SImode, hi_op2);

	  emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
	  rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
				   const0_rtx);
	  if (hi_op2 == const0_rtx)
	    emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
	  else
	    emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
	}

      if (lo_result != lo_dest)
	emit_move_insn (lo_result, lo_dest);
      if (hi_result != hi_dest)
	emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
      DONE;
    }
  "
)

;; Signed add with overflow check: sets CC_V and branches (via
;; arm_gen_unlikely_cbranch) when the signed addition overflows.
(define_expand "addvsi4"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "s_register_operand")
   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  if (CONST_INT_P (operands[2]))
    emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);

  DONE;
})

(define_expand "addvdi4"
  [(match_operand:DI 0 "s_register_operand")
   (match_operand:DI 1 "s_register_operand")
   (match_operand:DI 2 "reg_or_int_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  rtx lo_result, hi_result;
  rtx lo_op1, hi_op1, lo_op2, hi_op2;
  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
			  &lo_op2, &hi_op2);
  lo_result = gen_lowpart (SImode, operands[0]);
  hi_result = gen_highpart (SImode, operands[0]);

  if (lo_op2 == const0_rtx)
    {
      emit_move_insn (lo_result, lo_op1);
      if (!arm_add_operand (hi_op2, SImode))
	hi_op2 = force_reg (SImode, hi_op2);

      emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
    }
  else
    {
      if (!arm_add_operand (lo_op2, SImode))
	lo_op2 = force_reg (SImode, lo_op2);
      if (!arm_not_operand (hi_op2, SImode))
	hi_op2 = force_reg (SImode, hi_op2);

      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));

      if (hi_op2 == const0_rtx)
	emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
      else if (CONST_INT_P (hi_op2))
	emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
      else
	emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));

      arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
    }

  DONE;
})

(define_expand "addsi3_cin_vout_reg"
  [(parallel
    [(set (match_dup 3)
	  (compare:CC_V
	   (plus:DI
	    (plus:DI (match_dup 4)
		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
	    (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
				    (match_dup 2)))))
     (set (match_operand:SI 0 "s_register_operand")
	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
		   (match_dup 2)))])]
  "TARGET_32BIT"
  {
    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
  }
)

(define_insn "*addsi3_cin_vout_reg_insn"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (plus:DI
	   (match_operand:DI 3 "arm_carry_operation" "")
	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
	 (sign_extend:DI
	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
			    (match_dup 1))
		   (match_dup 2)))))
   (set (match_operand:SI 0 "s_register_operand" "=l,r")
	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
		 (match_dup 2)))]
  "TARGET_32BIT"
  "@
   adcs%?\\t%0, %0, %2
   adcs%?\\t%0, %1, %2"
  [(set_attr "type" "alus_sreg")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")]
)

(define_expand "addsi3_cin_vout_imm"
  [(parallel
    [(set (match_dup 3)
	  (compare:CC_V
	   (plus:DI
	    (plus:DI (match_dup 4)
		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
	    (match_dup 2))
	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
				    (match_dup 2)))))
     (set (match_operand:SI 0 "s_register_operand")
	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
		   (match_operand 2 "arm_adcimm_operand")))])]
  "TARGET_32BIT"
  {
    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
  }
)

(define_insn "*addsi3_cin_vout_imm_insn"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (plus:DI
	   (match_operand:DI 3 "arm_carry_operation" "")
	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
	  (match_operand 2 "arm_adcimm_operand" "I,K"))
	 (sign_extend:DI
	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
			    (match_dup 1))
		   (match_dup 2)))))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
		 (match_dup 2)))]
  "TARGET_32BIT"
  "@
   adcs%?\\t%0, %1, %2
   sbcs%?\\t%0, %1, #%B2"
  [(set_attr "type" "alus_imm")]
)

(define_expand "addsi3_cin_vout_0"
  [(parallel
    [(set (match_dup 2)
	  (compare:CC_V
	   (plus:DI (match_dup 3)
		    (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
	   (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
     (set (match_operand:SI 0 "s_register_operand")
	  (plus:SI (match_dup 4) (match_dup 1)))])]
  "TARGET_32BIT"
  {
    operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
  }
)

(define_insn "*addsi3_cin_vout_0_insn"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (match_operand:DI 2 "arm_carry_operation" "")
	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
	 (sign_extend:DI (plus:SI
			  (match_operand:SI 3 "arm_carry_operation" "")
			  (match_dup 1)))))
   (set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (match_dup 3) (match_dup 1)))]
  "TARGET_32BIT"
  "adcs%?\\t%0, %1, #0"
  [(set_attr "type" "alus_imm")]
)

;; Unsigned add with overflow check: branches on carry (LTU on CC_C).
(define_expand "uaddvsi4"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "s_register_operand")
   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);

  DONE;
})

(define_expand "uaddvdi4"
  [(match_operand:DI 0 "s_register_operand")
   (match_operand:DI 1 "s_register_operand")
   (match_operand:DI 2 "reg_or_int_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  rtx lo_result, hi_result;
  rtx lo_op1, hi_op1, lo_op2, hi_op2;
  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
			  &lo_op2, &hi_op2);
  lo_result = gen_lowpart (SImode, operands[0]);
  hi_result = gen_highpart (SImode, operands[0]);

  if (lo_op2 == const0_rtx)
    {
      emit_move_insn (lo_result, lo_op1);
      if (!arm_add_operand (hi_op2, SImode))
	hi_op2 = force_reg (SImode, hi_op2);

      emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
    }
  else
    {
      if (!arm_add_operand (lo_op2, SImode))
	lo_op2 = force_reg (SImode, lo_op2);
      if (!arm_not_operand (hi_op2, SImode))
	hi_op2 = force_reg (SImode, hi_op2);

      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));

      if (hi_op2 == const0_rtx)
	emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
      else if (CONST_INT_P (hi_op2))
	emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
      else
	emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));

      arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
    }

  DONE;
})

(define_expand "addsi3_cin_cout_reg"
  [(parallel
    [(set (match_dup 3)
	  (compare:CC_ADC
	   (plus:DI
	    (plus:DI (match_dup 4)
		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
	    (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
	   (const_int 4294967296)))
     (set (match_operand:SI 0 "s_register_operand")
	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
		   (match_dup 2)))])]
  "TARGET_32BIT"
  {
    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
  }
)

(define_insn "*addsi3_cin_cout_reg_insn"
  [(set (reg:CC_ADC CC_REGNUM)
	(compare:CC_ADC
	 (plus:DI
	  (plus:DI
	   (match_operand:DI 3 "arm_carry_operation" "")
	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
	 (const_int 4294967296)))
   (set (match_operand:SI 0 "s_register_operand" "=l,r")
	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
			  (match_dup 1))
		 (match_dup 2)))]
  "TARGET_32BIT"
  "@
   adcs%?\\t%0, %0, %2
   adcs%?\\t%0, %1, %2"
  [(set_attr "type" "alus_sreg")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")]
)

(define_expand "addsi3_cin_cout_imm"
  [(parallel
    [(set (match_dup 3)
	  (compare:CC_ADC
	   (plus:DI
	    (plus:DI (match_dup 4)
		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
	    (match_dup 6))
	   (const_int 4294967296)))
     (set (match_operand:SI 0 "s_register_operand")
	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
		   (match_operand:SI 2 "arm_adcimm_operand")))])]
  "TARGET_32BIT"
  {
    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
    operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
  }
)

(define_insn "*addsi3_cin_cout_imm_insn"
  [(set (reg:CC_ADC CC_REGNUM)
	(compare:CC_ADC
	 (plus:DI
	  (plus:DI
	   (match_operand:DI 3 "arm_carry_operation" "")
	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
	  (match_operand:DI 5 "const_int_operand" "n,n"))
	 (const_int 4294967296)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
			  (match_dup 1))
		 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
  "TARGET_32BIT
   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
  "@
   adcs%?\\t%0, %1, %2
   sbcs%?\\t%0, %1, #%B2"
  [(set_attr "type" "alus_imm")]
)

(define_expand "addsi3_cin_cout_0"
  [(parallel
    [(set (match_dup 2)
	  (compare:CC_ADC
	   (plus:DI (match_dup 3)
		    (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
	   (const_int 4294967296)))
     (set (match_operand:SI 0 "s_register_operand")
	  (plus:SI (match_dup 4) (match_dup 1)))])]
  "TARGET_32BIT"
  {
    operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
  }
)

(define_insn "*addsi3_cin_cout_0_insn"
  [(set (reg:CC_ADC CC_REGNUM)
	(compare:CC_ADC
	 (plus:DI
	  (match_operand:DI 2 "arm_carry_operation" "")
	  (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
	 (const_int 4294967296)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
  "TARGET_32BIT"
  "adcs%?\\t%0, %1, #0"
  [(set_attr "type" "alus_imm")]
)

(define_expand "addsi3"
  [(set (match_operand:SI 0 "s_register_operand")
	(plus:SI (match_operand:SI 1 "s_register_operand")
		 (match_operand:SI 2 "reg_or_int_operand")))]
  "TARGET_EITHER"
  "
  if (TARGET_32BIT && CONST_INT_P (operands[2]))
    {
      arm_split_constant (PLUS, SImode, NULL_RTX,
			  INTVAL (operands[2]), operands[0], operands[1],
			  optimize && can_create_pseudo_p ());
      DONE;
    }
  "
)

; If there is a scratch available, this will be faster than synthesizing the
; addition.
;; The constant is not valid for ADD or SUB directly, but its bitwise
;; inverse is a valid immediate, so load it into the scratch first and
;; add register-to-register.
(define_peephole2
  [(match_scratch:SI 3 "r")
   (set (match_operand:SI 0 "arm_general_register_operand" "")
	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
		 (match_operand:SI 2 "const_int_operand" "")))]
  "TARGET_32BIT &&
   !(const_ok_for_arm (INTVAL (operands[2]))
     || const_ok_for_arm (-INTVAL (operands[2])))
   && const_ok_for_arm (~INTVAL (operands[2]))"
  [(set (match_dup 3) (match_dup 2))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
  ""
)

;; The r/r/k alternative is required when reloading the address
;; (plus (reg rN) (reg sp)) into (reg rN).  In this case reload will
;; put the duplicated register first, and not try the commutative version.
(define_insn_and_split "*arm_addsi3"
  [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
	(plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
		 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
  "TARGET_32BIT"
  "@
   add%?\\t%0, %0, %2
   add%?\\t%0, %1, %2
   add%?\\t%0, %1, %2
   add%?\\t%0, %1, %2
   add%?\\t%0, %1, %2
   add%?\\t%0, %1, %2
   add%?\\t%0, %2, %1
   add%?\\t%0, %1, %2
   addw%?\\t%0, %1, %2
   addw%?\\t%0, %1, %2
   sub%?\\t%0, %1, #%n2
   sub%?\\t%0, %1, #%n2
   sub%?\\t%0, %1, #%n2
   subw%?\\t%0, %1, #%n2
   subw%?\\t%0, %1, #%n2
   #"
  "TARGET_32BIT
   && CONST_INT_P (operands[2])
   && !const_ok_for_op (INTVAL (operands[2]), PLUS)
   && (reload_completed || !arm_eliminable_register (operands[1]))"
  [(clobber (const_int 0))]
  "
  arm_split_constant (PLUS, SImode, curr_insn,
	              INTVAL (operands[2]), operands[0],
		      operands[1], 0);
  DONE;
  "
  [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
   (set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
   (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
		      (const_string "alu_imm")
		      (const_string "alu_sreg")))
  ]
)

;; Addition that also sets the V (signed overflow) flag, modelled by
;; comparing the DImode sign-extended sum against the truncated SImode sum.
(define_insn "addsi3_compareV_reg"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
	  (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
	 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
   (set (match_operand:SI 0 "register_operand" "=l,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "adds%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,t2,*")
   (set_attr "length" "2,2,4")
   (set_attr "type" "alus_sreg")]
)

;; As above but the sum itself is discarded; only the flags are wanted.
(define_insn "*addsi3_compareV_reg_nosum"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
	  (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
	 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
  "TARGET_32BIT"
  "cmn%?\\t%0, %1"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")
   (set_attr "type" "alus_sreg")]
)

;; Subtract INT_MIN with V-flag tracking; adding 2^31 in DImode is
;; equivalent to subtracting INT_MIN in SImode.
(define_insn "subvsi3_intmin"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (sign_extend:DI
	   (match_operand:SI 1 "register_operand" "r"))
	  (const_int 2147483648))
	 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
   (set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 1) (const_int -2147483648)))]
  "TARGET_32BIT"
  "subs%?\\t%0, %1, #-2147483648"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm")]
)

;; Immediate form of addsi3_compareV_reg; negative immediates use SUBS.
(define_insn "addsi3_compareV_imm"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (sign_extend:DI
	   (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
	  (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
	 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
   (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT
   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
  "@
   adds%?\\t%0, %1, %2
   adds%?\\t%0, %0, %2
   subs%?\\t%0, %1, #%n2
   subs%?\\t%0, %0, #%n2
   adds%?\\t%0, %1, %2
   subs%?\\t%0, %1, #%n2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,t2,t2,t2,*,*")
   (set_attr "length" "2,2,2,2,4,4")
   (set_attr "type" "alus_imm")]
)

;; As above, but only the flags result is needed, so use CMN/CMP.
(define_insn "addsi3_compareV_imm_nosum"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (sign_extend:DI
	   (match_operand:SI 0 "register_operand" "l,r,r"))
	  (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
	 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
  "TARGET_32BIT
   && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
  "@
   cmp%?\\t%0, #%n1
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*,*")
   (set_attr "length" "2,4,4")
   (set_attr "type" "alus_imm")]
)

;; We can handle more constants efficiently if we can clobber either a scratch
;; or the other source operand.  We deliberately leave this late as in
;; high register pressure situations it's not worth forcing any reloads.
;; Convert a flags-only overflow compare into a short Thumb-2 ADDS that
;; also writes the (otherwise unused) scratch register.
(define_peephole2
  [(match_scratch:SI 2 "l")
   (set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (sign_extend:DI
	   (match_operand:SI 0 "low_register_operand"))
	  (match_operand 1 "const_int_operand"))
	 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
  "TARGET_THUMB2
   && satisfies_constraint_Pd (operands[1])"
  [(parallel[
    (set (reg:CC_V CC_REGNUM)
	 (compare:CC_V
	  (plus:DI (sign_extend:DI (match_dup 0))
		   (sign_extend:DI (match_dup 1)))
	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
    (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
)

;; As above, but reuse the (dead) source register as the destination
;; instead of needing a scratch.
(define_peephole2
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (plus:DI
	  (sign_extend:DI
	   (match_operand:SI 0 "low_register_operand"))
	  (match_operand 1 "const_int_operand"))
	 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
  "TARGET_THUMB2
   && dead_or_set_p (peep2_next_insn (0), operands[0])
   && satisfies_constraint_Py (operands[1])"
  [(parallel[
    (set (reg:CC_V CC_REGNUM)
	 (compare:CC_V
	  (plus:DI (sign_extend:DI (match_dup 0))
		   (sign_extend:DI (match_dup 1)))
	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
    (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
)

;; Addition that also sets the N and Z flags from the result.
(define_insn "addsi3_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
		  (match_operand:SI 2 "arm_add_operand" "I,L,r"))
	 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_ARM"
  "@
   adds%?\\t%0, %1, %2
   subs%?\\t%0, %1, #%n2
   adds%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
)

;; Flags-only variant of the above; the sum is discarded so CMN/CMP suffice.
(define_insn "*addsi3_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
		  (match_operand:SI 1 "arm_add_operand" "I,L, r"))
	 (const_int 0)))]
  "TARGET_ARM"
  "@
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1
   cmn%?\\t%0, %1"
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
)

;; Compare a negated register against another register using CMN.
(define_insn "*compare_negsi_si"
  [(set (reg:CC_Z CC_REGNUM)
	(compare:CC_Z
	 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
	 (match_operand:SI 1 "s_register_operand" "l,r")))]
  "TARGET_32BIT"
  "cmn%?\\t%1, %0"
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")
   (set_attr "predicable_short_it" "yes,no")
   (set_attr "type" "alus_sreg")]
)

;; This is the canonicalization of subsi3_compare when the
;; addend is a constant.
(define_insn "cmpsi2_addneg"
  [(set (reg:CC CC_REGNUM)
	(compare:CC
	 (match_operand:SI 1 "s_register_operand" "r,r")
	 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(plus:SI (match_dup 1)
		 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
  "TARGET_32BIT
   && (INTVAL (operands[2])
       == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
{
  /* For 0 and INT_MIN it is essential that we use subs, as adds will result
     in different condition codes (like cmn rather than like cmp), so that
     alternative comes first.  Both alternatives can match for any 0x??000000
     where except for 0 and INT_MIN it doesn't matter what we choose, and also
     for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
     as it is shorter.  */
  if (which_alternative == 0 && operands[3] != const1_rtx)
    return "subs%?\\t%0, %1, #%n3";
  else
    return "adds%?\\t%0, %1, %3";
}
  [(set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)

;; Convert the sequence
;;  sub  rd, rn, #1
;;  cmn  rd, #1	(equivalent to cmp rd, #-1)
;;  bne  dest
;; into
;;  subs rd, rn, #1
;;  bcs  dest	((unsigned)rn >= 1)
;; similarly for the beq variant using bcc.
;; This is a common looping idiom (while (n--))
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
		 (const_int -1)))
   (set (match_operand 2 "cc_register" "")
	(compare (match_dup 0) (const_int -1)))
   (set (pc)
	(if_then_else (match_operator 3 "equality_operator"
		       [(match_dup 2) (const_int 0)])
		      (match_operand 4 "" "")
		      (match_operand 5 "" "")))]
  "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
  [(parallel[
    (set (match_dup 2)
	 (compare:CC
	  (match_dup 1) (const_int 1)))
    (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
   (set (pc)
	(if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
		      (match_dup 4)
		      (match_dup 5)))]
  "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
   operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
				  ? GEU : LTU),
				 VOIDmode,
				 operands[2], const0_rtx);"
)

;; The next four insns work because they compare the result with one of
;; the operands, and we know that the use of the condition code is
;; either GEU or LTU, so we can use the carry flag from the addition
;; instead of doing the compare a second time.
;; Add and compare the result against operand 1 (carry-flag idiom).
(define_insn "addsi3_compare_op1"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
	 (match_dup 1)))
   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "@
   adds%?\\t%0, %1, %2
   adds%?\\t%0, %0, %2
   subs%?\\t%0, %1, #%n2
   subs%?\\t%0, %0, #%n2
   adds%?\\t%0, %1, %2
   subs%?\\t%0, %1, #%n2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,t2,t2,t2,*,*")
   (set_attr "length" "2,2,2,2,4,4")
   (set (attr "type")
	(if_then_else (match_operand 2 "const_int_operand")
		      (const_string "alu_imm")
		      (const_string "alu_sreg")))]
)

;; As above, but the comparison is against operand 2.
(define_insn "*addsi3_compare_op2"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
	 (match_dup 2)))
   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
	(plus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "@
   adds%?\\t%0, %1, %2
   adds%?\\t%0, %0, %2
   subs%?\\t%0, %1, #%n2
   subs%?\\t%0, %0, #%n2
   adds%?\\t%0, %1, %2
   subs%?\\t%0, %1, #%n2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,t2,t2,t2,*,*")
   (set_attr "length" "2,2,2,2,4,4")
   (set (attr "type")
	(if_then_else (match_operand 2 "const_int_operand")
		      (const_string "alu_imm")
		      (const_string "alu_sreg")))]
)

;; Flags-only form comparing the sum against operand 0; no sum is kept.
(define_insn "*compare_addsi2_op0"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
		  (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
	 (match_dup 0)))]
  "TARGET_32BIT"
  "@
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1"
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,t2,*,*")
   (set_attr "predicable_short_it" "yes,yes,no,no")
   (set_attr "length" "2,2,4,4")
   (set (attr "type")
	(if_then_else (match_operand 1 "const_int_operand")
		      (const_string "alu_imm")
		      (const_string "alu_sreg")))]
)

;; Flags-only form comparing the sum against operand 1.
(define_insn "*compare_addsi2_op1"
  [(set (reg:CC_C CC_REGNUM)
	(compare:CC_C
	 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
		  (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
	 (match_dup 1)))]
  "TARGET_32BIT"
  "@
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1
   cmn%?\\t%0, %1
   cmp%?\\t%0, #%n1"
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,t2,*,*")
   (set_attr "predicable_short_it" "yes,yes,no,no")
   (set_attr "length" "2,2,4,4")
   (set (attr "type")
	(if_then_else (match_operand 1 "const_int_operand")
		      (const_string "alu_imm")
		      (const_string "alu_sreg")))]
 )

;; Add with carry-in (ADC); the K alternative uses SBC with the
;; bitwise-inverted immediate (%B2).
(define_insn "addsi3_carryin"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
			  (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
		 (match_operand:SI 3 "arm_carry_operation" "")))]
  "TARGET_32BIT"
  "@
   adc%?\\t%0, %1, %2
   adc%?\\t%0, %1, %2
   sbc%?\\t%0, %1, #%B2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,*,*")
   (set_attr "length" "4")
   (set_attr "predicable_short_it" "yes,no,no")
   (set_attr "type" "adc_reg,adc_reg,adc_imm")]
)

;; Canonicalization of the above when the immediate is zero.
(define_insn "add0si3_carryin"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (match_operand:SI 2 "arm_carry_operation" "")
		 (match_operand:SI 1 "arm_not_operand" "r")))]
  "TARGET_32BIT"
  "adc%?\\t%0, %1, #0"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "length" "4")
   (set_attr "type" "adc_imm")]
)

;; As addsi3_carryin, but with the carry operand on the inner plus.
(define_insn "*addsi3_carryin_alt2"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
	(plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
			  (match_operand:SI 1 "s_register_operand" "%l,r,r"))
		 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
  "TARGET_32BIT"
  "@
   adc%?\\t%0, %1, %2
   adc%?\\t%0, %1, %2
   sbc%?\\t%0, %1, #%B2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,*,*")
   (set_attr "length" "4")
   (set_attr "predicable_short_it" "yes,no,no")
   (set_attr "type" "adc_reg,adc_reg,adc_imm")]
)

;; ADC where the second input is a shifted register.
(define_insn "*addsi3_carryin_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(plus:SI (plus:SI
		  (match_operator:SI 2 "shift_operator"
		   [(match_operand:SI 3 "s_register_operand" "r,r")
		    (match_operand:SI 4 "shift_amount_operand" "M,r")])
		  (match_operand:SI 5 "arm_carry_operation" ""))
		 (match_operand:SI 1 "s_register_operand" "r,r")))]
  "TARGET_32BIT"
  "adc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "arch" "32,a")
   (set_attr "shift" "3")
   (set_attr "predicable" "yes")
   (set_attr "autodetect_type" "alu_shift_operator2")]
)

;; ADCS form: add with carry-in that also clobbers the flags.
(define_insn "*addsi3_carryin_clobercc"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
			  (match_operand:SI 2 "arm_rhs_operand" "rI"))
		 (match_operand:SI 3 "arm_carry_operation" "")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT"
  "adcs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "adcs_reg")]
)

;; Signed subtract with overflow check; operand 3 is the overflow label.
(define_expand "subvsi4"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "arm_rhs_operand")
   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
    {
      /* If both operands are constants we can decide the result statically.  */
      wi::overflow_type overflow;
      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
			      rtx_mode_t (operands[2], SImode),
			      SIGNED, &overflow);
      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
      if (overflow != wi::OVF_NONE)
	emit_jump_insn (gen_jump (operands[3]));
      DONE;
    }
  else if (CONST_INT_P (operands[2]))
    {
      operands[2] = GEN_INT (-INTVAL (operands[2]));
      /* Special case for INT_MIN.  */
      if (INTVAL (operands[2]) == 0x80000000)
	emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
      else
	emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
					    operands[2]));
    }
  else if (CONST_INT_P (operands[1]))
    emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));

  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
  DONE;
})

;; Signed DImode subtract with overflow check, decomposed into SImode
;; low/high halves with a borrow between them.
(define_expand "subvdi4"
  [(match_operand:DI 0 "s_register_operand")
   (match_operand:DI 1 "reg_or_int_operand")
   (match_operand:DI 2 "reg_or_int_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  rtx lo_result, hi_result;
  rtx lo_op1, hi_op1, lo_op2, hi_op2;
  lo_result = gen_lowpart (SImode, operands[0]);
  hi_result = gen_highpart (SImode, operands[0]);
  machine_mode mode = CCmode;

  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
    {
      /* If both operands are constants we can decide the result statically.  */
      wi::overflow_type overflow;
      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
			      rtx_mode_t (operands[2], DImode),
			      SIGNED, &overflow);
      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
      if (overflow != wi::OVF_NONE)
	emit_jump_insn (gen_jump (operands[3]));
      DONE;
    }
  else if (CONST_INT_P (operands[1]))
    {
      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
			      &lo_op1, &hi_op1);
      if (const_ok_for_arm (INTVAL (lo_op1)))
	{
	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
					  GEN_INT (~UINTVAL (lo_op1))));
	  /* We could potentially use RSC here in Arm state, but not
	     in Thumb, so it's probably not worth the effort of handling
	     this.  */
	  hi_op1 = force_reg (SImode, hi_op1);
	  mode = CC_RSBmode;
	  goto highpart;
	}
      operands[1] = force_reg (DImode, operands[1]);
    }

  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
			  &lo_op2, &hi_op2);
  if (lo_op2 == const0_rtx)
    {
      emit_move_insn (lo_result, lo_op1);
      if (!arm_add_operand (hi_op2, SImode))
	hi_op2 = force_reg (SImode, hi_op2);
      emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
      DONE;
    }

  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
    lo_op2 = force_reg (SImode, lo_op2);
  if (CONST_INT_P (lo_op2))
    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
				  gen_int_mode (-INTVAL (lo_op2), SImode)));
  else
    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));

 highpart:
  if (!arm_not_operand (hi_op2, SImode))
    hi_op2 = force_reg (SImode, hi_op2);
  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
  if (CONST_INT_P (hi_op2))
    emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
				       gen_rtx_LTU (SImode, ccreg, const0_rtx),
				       gen_rtx_LTU (DImode, ccreg,
						    const0_rtx)));
  else
    emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
				   gen_rtx_LTU (SImode, ccreg, const0_rtx),
				   gen_rtx_LTU (DImode, ccreg, const0_rtx)));
  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);

  DONE;
})

;; Unsigned subtract with overflow (borrow) check; operand 3 is the
;; overflow label.
(define_expand "usubvsi4"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "arm_rhs_operand")
   (match_operand:SI 2 "arm_add_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  machine_mode mode = CCmode;
  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
    {
      /* If both operands are constants we can decide the result statically.  */
      wi::overflow_type overflow;
      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
			      rtx_mode_t (operands[2], SImode),
			      UNSIGNED, &overflow);
      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
      if (overflow != wi::OVF_NONE)
	emit_jump_insn (gen_jump (operands[3]));
      DONE;
    }
  else if (CONST_INT_P (operands[2]))
    emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
				  gen_int_mode (-INTVAL (operands[2]),
						SImode)));
  else if (CONST_INT_P (operands[1]))
    {
      mode = CC_RSBmode;
      emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
				      GEN_INT (~UINTVAL (operands[1]))));
    }
  else
    emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
  arm_gen_unlikely_cbranch (LTU, mode, operands[3]);

  DONE;
})

;; Unsigned DImode subtract with overflow check, decomposed like subvdi4.
(define_expand "usubvdi4"
  [(match_operand:DI 0 "s_register_operand")
   (match_operand:DI 1 "reg_or_int_operand")
   (match_operand:DI 2 "reg_or_int_operand")
   (match_operand 3 "")]
  "TARGET_32BIT"
{
  rtx lo_result, hi_result;
  rtx lo_op1, hi_op1, lo_op2, hi_op2;
  lo_result = gen_lowpart (SImode, operands[0]);
  hi_result = gen_highpart (SImode, operands[0]);
  machine_mode mode = CCmode;

  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
    {
      /* If both operands are constants we can decide the result statically.  */
      wi::overflow_type overflow;
      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
			      rtx_mode_t (operands[2], DImode),
			      UNSIGNED, &overflow);
      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
      if (overflow != wi::OVF_NONE)
	emit_jump_insn (gen_jump (operands[3]));
      DONE;
    }
  else if (CONST_INT_P (operands[1]))
    {
      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
			      &lo_op1, &hi_op1);
      if (const_ok_for_arm (INTVAL (lo_op1)))
	{
	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
					  GEN_INT (~UINTVAL (lo_op1))));
	  /* We could potentially use RSC here in Arm state, but not
	     in Thumb, so it's probably not worth the effort of handling
	     this.  */
	  hi_op1 = force_reg (SImode, hi_op1);
	  mode = CC_RSBmode;
	  goto highpart;
	}
      operands[1] = force_reg (DImode, operands[1]);
    }

  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
			  &lo_op2, &hi_op2);
  if (lo_op2 == const0_rtx)
    {
      emit_move_insn (lo_result, lo_op1);
      if (!arm_add_operand (hi_op2, SImode))
	hi_op2 = force_reg (SImode, hi_op2);
      emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
      DONE;
    }

  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
    lo_op2 = force_reg (SImode, lo_op2);
  if (CONST_INT_P (lo_op2))
    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
				  gen_int_mode (-INTVAL (lo_op2), SImode)));
  else
    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));

 highpart:
  if (!arm_not_operand (hi_op2, SImode))
    hi_op2 = force_reg (SImode, hi_op2);
  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
  if (CONST_INT_P (hi_op2))
    emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
					GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
					gen_rtx_LTU (SImode, ccreg, const0_rtx),
					gen_rtx_LTU (DImode, ccreg,
						     const0_rtx)));
  else
    emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
				    gen_rtx_LTU (SImode, ccreg, const0_rtx),
				    gen_rtx_LTU (DImode, ccreg, const0_rtx)));
  arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);

  DONE;
})

;; Subtract that also sets the full flags from the comparison of the inputs.
(define_insn "subsi3_compare1"
  [(set (reg:CC CC_REGNUM)
	(compare:CC
	 (match_operand:SI 1 "register_operand" "r")
	 (match_operand:SI 2 "register_operand" "r")))
   (set (match_operand:SI 0 "register_operand" "=r")
	(minus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "subs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)

;; Subtract that sets the V (signed overflow) flag.
(define_insn "subvsi3"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (minus:DI
	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
   (set (match_operand:SI 0 "s_register_operand" "=l,r")
	(minus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "subs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")
   (set_attr "type" "alus_sreg")]
)

;; V-flag-setting reverse subtract when the minuend is an immediate (RSBS).
(define_insn "subvsi3_imm1"
  [(set (reg:CC_V CC_REGNUM)
	(compare:CC_V
	 (minus:DI
	  (match_operand 1 "arm_immediate_operand" "I")
	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
   (set (match_operand:SI 0 "s_register_operand" "=r")
	(minus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "rsbs%?\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm")]
)

;; Subtract with borrow-in (SBC); RSC only exists in Arm state, and the
;; Pz (zero) alternative uses the x - 2x == -x identity.
(define_insn "subsi3_carryin"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
			    (match_operand:SI 2 "s_register_operand" "r,r,r"))
		  (match_operand:SI 3 "arm_borrow_operation" "")))]
  "TARGET_32BIT"
  "@
   sbc%?\\t%0, %1, %2
   rsc%?\\t%0, %2, %1
   sbc%?\\t%0, %2, %2, lsl #1"
  [(set_attr "conds" "use")
   (set_attr "arch" "*,a,t2")
   (set_attr "predicable" "yes")
   (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
)

;; Special canonicalization of the above when operand1 == (const_int 1):
;; in this case the 'borrow' needs to be treated like subtracting from
;; the carry.
(define_insn "rsbsi_carryin_reg"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(minus:SI (match_operand:SI 1 "arm_carry_operation" "")
		  (match_operand:SI 2 "s_register_operand" "r")))]
  "TARGET_ARM"
  "rsc%?\\t%0, %2, #1"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "type" "adc_imm")]
)

;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
;;   => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
(define_insn "*add_not_cin"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(plus:SI
	 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
		  (match_operand:SI 3 "arm_carry_operation" ""))
	 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
  "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
  "@
   sbc%?\\t%0, %2, %1
   rsc%?\\t%0, %1, %2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "arch" "*,a")
   (set_attr "type" "adc_reg,adc_imm")]
)

;; On Arm we can also use the same trick when the non-inverted operand is
;; shifted, using RSC.
(define_insn "add_not_shift_cin"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(plus:SI
	 (plus:SI (match_operator:SI 3 "shift_operator"
		   [(match_operand:SI 1 "s_register_operand" "r,r")
		    (match_operand:SI 2 "shift_amount_operand" "M,r")])
		  (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
	 (match_operand:SI 5 "arm_carry_operation" "")))]
  "TARGET_ARM"
  "rsc%?\\t%0, %4, %1%S3"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "autodetect_type" "alu_shift_operator3")]
)

;; Compare with borrow-in, producing a new borrow-out (SBCS); the result
;; register is only a scratch — just the flags are wanted.
(define_insn "cmpsi3_carryin_<CC_EXTEND>out"
  [(set (reg:<CC_EXTEND> CC_REGNUM)
	(compare:<CC_EXTEND>
	 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
		  (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
   (clobber (match_scratch:SI 0 "=l,r"))]
  "TARGET_32BIT"
  "sbcs\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")
   (set_attr "type" "adc_reg")]
)

;; Similar to the above, but handling a constant which has a different
;; canonicalization.
(define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
  [(set (reg:<CC_EXTEND> CC_REGNUM)
	(compare:<CC_EXTEND>
	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
		  (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
   (clobber (match_scratch:SI 0 "=l,r"))]
  "TARGET_32BIT"
  "@
   sbcs\\t%0, %1, %2
   adcs\\t%0, %1, #%B2"
  [(set_attr "conds" "set")
   (set_attr "type" "adc_imm")]
)

;; Further canonicalization when the constant is zero.
(define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
  [(set (reg:<CC_EXTEND> CC_REGNUM)
	(compare:<CC_EXTEND>
	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
	 (match_operand:DI 2 "arm_borrow_operation" "")))
   (clobber (match_scratch:SI 0 "=l,r"))]
  "TARGET_32BIT"
  "sbcs\\t%0, %1, #0"
  [(set_attr "conds" "set")
   (set_attr "type" "adc_imm")]
)

;; Subtract-with-borrow where the constant appears as a negated addend;
;; %n2 re-negates it for the SBC immediate.
(define_insn "*subsi3_carryin_const"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(minus:SI (plus:SI
		   (match_operand:SI 1 "s_register_operand" "r")
		   (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
		  (match_operand:SI 3 "arm_borrow_operation" "")))]
  "TARGET_32BIT"
  "sbc\\t%0, %1, #%n2"
  [(set_attr "conds" "use")
   (set_attr "type" "adc_imm")]
)

;; As above with a zero constant: subtract only the borrow.
(define_insn "*subsi3_carryin_const0"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(minus:SI (match_operand:SI 1 "s_register_operand" "r")
		  (match_operand:SI 2 "arm_borrow_operation" "")))]
  "TARGET_32BIT"
  "sbc\\t%0, %1, #0"
  [(set_attr "conds" "use")
   (set_attr "type" "adc_imm")]
)

;; SBC where the subtrahend is a shifted register.
(define_insn "*subsi3_carryin_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
		   (match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operator:SI 2 "shift_operator"
		    [(match_operand:SI 3 "s_register_operand" "r,r")
		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
		  (match_operand:SI 5 "arm_borrow_operation" "")))]
  "TARGET_32BIT"
  "sbc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "arch" "32,a")
   (set_attr "shift" "3")
   (set_attr "predicable" "yes")
   (set_attr "autodetect_type" "alu_shift_operator2")]
)

;; As above, with the borrow on the inner minus.
(define_insn "*subsi3_carryin_shift_alt"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
		   (match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operand:SI 5 "arm_borrow_operation" ""))
		  (match_operator:SI 2 "shift_operator"
		   [(match_operand:SI 3 "s_register_operand" "r,r")
		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
  "TARGET_32BIT"
  "sbc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "arch" "32,a")
   (set_attr "shift" "3")
   (set_attr "predicable" "yes")
   (set_attr "autodetect_type" "alu_shift_operator2")]
)

;; No RSC in Thumb2
(define_insn "*rsbsi3_carryin_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
		   (match_operator:SI 2 "shift_operator"
		    [(match_operand:SI 3 "s_register_operand" "r,r")
		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
		   (match_operand:SI 1 "s_register_operand" "r,r"))
		  (match_operand:SI 5 "arm_borrow_operation" "")))]
  "TARGET_ARM"
  "rsc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "autodetect_type" "alu_shift_operator2")]
)

;; As above, with the borrow on the inner minus.
(define_insn "*rsbsi3_carryin_shift_alt"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (minus:SI
		   (match_operator:SI 2 "shift_operator"
		    [(match_operand:SI 3 "s_register_operand" "r,r")
		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
		   (match_operand:SI 5 "arm_borrow_operation" ""))
		  (match_operand:SI 1 "s_register_operand" "r,r")))]
  "TARGET_ARM"
  "rsc%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "use")
   (set_attr "predicable" "yes")
   (set_attr "autodetect_type" "alu_shift_operator2")]
)

; transform ((x << y) - 1) to ~(~(x-1) << y)  Where X is a constant.
;; Split (const << y) - 1 into a constant load plus a single MVN-with-shift:
;; ~(~(x - 1) << y).  Needs a scratch register (operand 3).
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
        (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
                            (match_operand:SI 2 "s_register_operand" ""))
                 (const_int -1)))
   (clobber (match_operand:SI 3 "s_register_operand" ""))]
  "TARGET_32BIT"
  [(set (match_dup 3) (match_dup 1))
   (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
  "
  operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
")

;; Single-precision FP add; matched by the VFP patterns when hard-float.
(define_expand "addsf3"
  [(set (match_operand:SF 0 "s_register_operand")
        (plus:SF (match_operand:SF 1 "s_register_operand")
                 (match_operand:SF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "
")

;; Double-precision FP add; requires double-precision VFP hardware.
(define_expand "adddf3"
  [(set (match_operand:DF 0 "s_register_operand")
        (plus:DF (match_operand:DF 1 "s_register_operand")
                 (match_operand:DF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  "
")

;; DImode subtract.  For 32-bit targets this is decomposed here into a
;; low-part subtract that sets the flags followed by a subtract-with-borrow
;; on the high part, choosing RSBS/NEGS forms when the minuend half is a
;; constant or zero.
(define_expand "subdi3"
 [(parallel
   [(set (match_operand:DI            0 "s_register_operand")
         (minus:DI (match_operand:DI 1 "reg_or_int_operand")
                   (match_operand:DI 2 "s_register_operand")))
    (clobber (reg:CC CC_REGNUM))])]
  "TARGET_EITHER"
  "
  if (TARGET_THUMB1)
    {
      if (!REG_P (operands[1]))
        operands[1] = force_reg (DImode, operands[1]);
    }
  else
    {
      rtx lo_result, hi_result, lo_dest, hi_dest;
      rtx lo_op1, hi_op1, lo_op2, hi_op2;
      rtx condition;

      /* Since operands[1] may be an integer, pass it second, so that
         any necessary simplifications will be done on the decomposed
         constant.  */
      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
                              &lo_op1, &hi_op1);
      lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
      hi_result = hi_dest = gen_highpart (SImode, operands[0]);

      if (!arm_rhs_operand (lo_op1, SImode))
        lo_op1 = force_reg (SImode, lo_op1);

      if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
          || !arm_rhs_operand (hi_op1, SImode))
        hi_op1 = force_reg (SImode, hi_op1);

      rtx cc_reg;
      if (lo_op1 == const0_rtx)
        {
          /* 0 - x: use NEGS so the borrow is produced in CC_RSB mode.  */
          cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
          emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
        }
      else if (CONST_INT_P (lo_op1))
        {
          /* const - x: RSBS, compared against ~const (see rsb_imm_compare).  */
          cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
          emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
                                          GEN_INT (~UINTVAL (lo_op1))));
        }
      else
        {
          cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
          emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
        }

      condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);

      if (hi_op1 == const0_rtx)
        emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
      else
        emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));

      if (lo_result != lo_dest)
        emit_move_insn (lo_result, lo_dest);

      if (hi_result != hi_dest)
        emit_move_insn (hi_result, hi_dest);

      DONE;
    }
  "
)

;; SImode subtract.  A constant minuend is either forced to a register or
;; split early via arm_split_constant, depending on cost.
(define_expand "subsi3"
  [(set (match_operand:SI           0 "s_register_operand")
        (minus:SI (match_operand:SI 1 "reg_or_int_operand")
                  (match_operand:SI 2 "s_register_operand")))]
  "TARGET_EITHER"
  "
  if (CONST_INT_P (operands[1]))
    {
      if (TARGET_32BIT)
        {
          if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
            operands[1] = force_reg (SImode, operands[1]);
          else
            {
              arm_split_constant (MINUS, SImode, NULL_RTX,
                                  INTVAL (operands[1]), operands[0],
                                  operands[2],
                                  optimize && can_create_pseudo_p ());
              DONE;
            }
        }
      else /* TARGET_THUMB1 */
        operands[1] = force_reg (SImode, operands[1]);
    }
  "
)

; ??? Check Thumb-2 split length
;; SImode subtract insn.  The final alternative (?n: an awkward constant
;; minuend) is emitted as '#' and split into a multi-insn sequence.
(define_insn_and_split "*arm_subsi3_insn"
  [(set (match_operand:SI           0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
        (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
                  (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
  "TARGET_32BIT"
  "@
   sub%?\\t%0, %1, %2
   sub%?\\t%0, %2
   sub%?\\t%0, %1, %2
   rsb%?\\t%0, %2, %1
   rsb%?\\t%0, %2, %1
   sub%?\\t%0, %1, %2
   sub%?\\t%0, %1, %2
   sub%?\\t%0, %1, %2
   #"
  "&& (CONST_INT_P (operands[1])
       && !const_ok_for_arm (INTVAL (operands[1])))"
  [(clobber (const_int 0))]
  "
  arm_split_constant (MINUS, SImode, curr_insn,
                      INTVAL (operands[1]), operands[0], operands[2], 0);
  DONE;
  "
  [(set_attr "length" "4,4,4,4,4,4,4,4,16")
   (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
   (set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
   (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
)

;; (const - reg) where const is not encodable but ~const is: load ~const...
;; NOTE(review): the split emits (set 3 const) then (3 - reg); the MVN form
;; is selected when the constant move itself is matched — confirm against
;; the constant-move patterns.
(define_peephole2
  [(match_scratch:SI 3 "r")
   (set (match_operand:SI 0 "arm_general_register_operand" "")
        (minus:SI (match_operand:SI 1 "const_int_operand" "")
                  (match_operand:SI 2 "arm_general_register_operand" "")))]
  "TARGET_32BIT
   && !const_ok_for_arm (INTVAL (operands[1]))
   && const_ok_for_arm (~INTVAL (operands[1]))"
  [(set (match_dup 3) (match_dup 1))
   (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
  ""
)

;; Subtract that also sets N and Z from the result (CC_NZ).
(define_insn "subsi3_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ
         (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
                   (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
         (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
        (minus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "@
   subs%?\\t%0, %1, %2
   subs%?\\t%0, %1, %2
   rsbs%?\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
)

;; Subtract that sets the full flags from comparing the two inputs.
(define_insn "subsi3_compare"
  [(set (reg:CC CC_REGNUM)
        (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
                    (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
        (minus:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "@
   subs%?\\t%0, %1, %2
   subs%?\\t%0, %1, %2
   rsbs%?\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
)

;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
;; rather than (0 cmp reg).  This gives the same results for unsigned
;; and equality compares which is what we mostly need here.
(define_insn "rsb_imm_compare"
  [(set (reg:CC_RSB CC_REGNUM)
        (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
                        (match_operand 3 "const_int_operand" "")))
   (set (match_operand:SI 0 "s_register_operand" "=r")
        (minus:SI (match_operand 1 "arm_immediate_operand" "I")
                  (match_dup 2)))]
  "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
  "rsbs\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm")]
)

;; Similarly, but the result is unused.
(define_insn "rsb_imm_compare_scratch"
  [(set (reg:CC_RSB CC_REGNUM)
        (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
                        (match_operand 1 "arm_not_immediate_operand" "K")))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_32BIT"
  "rsbs\\t%0, %2, #%B1"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm")]
)

;; Compare the sum of a value plus a carry against a constant.  Uses
;; RSC, so the result is swapped.  Only available on Arm
(define_insn "rscsi3_<CC_EXTEND>out_scratch"
  [(set (reg:CC_SWP CC_REGNUM)
        (compare:CC_SWP
         (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
                  (match_operand:DI 3 "arm_borrow_operation" ""))
         (match_operand 1 "arm_immediate_operand" "I")))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_ARM"
  "rscs\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm")]
)

;; SBCS: subtract-with-borrow that records unsigned overflow (CC_B).
(define_insn "usubvsi3_borrow"
  [(set (reg:CC_B CC_REGNUM)
        (compare:CC_B
         (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
         (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
                  (zero_extend:DI
                   (match_operand:SI 2 "s_register_operand" "l,r")))))
   (set (match_operand:SI 0 "s_register_operand" "=l,r")
        (minus:SI (match_dup 1)
                  (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
                           (match_dup 2))))]
  "TARGET_32BIT"
  "sbcs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")]
)

;; As above with an immediate subtrahend; the second alternative uses ADCS
;; with the bitwise-complemented immediate (%B2).
(define_insn "usubvsi3_borrow_imm"
  [(set (reg:CC_B CC_REGNUM)
        (compare:CC_B
         (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
         (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
                  (match_operand:DI 3 "const_int_operand" "n,n"))))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
        (minus:SI (match_dup 1)
                  (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
                           (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
  "TARGET_32BIT
   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
  "@
   sbcs%?\\t%0, %1, %2
   adcs%?\\t%0, %1, #%B2"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm")]
)

;; SBCS recording signed overflow (CC_V): the DI-mode result is compared
;; against the sign-extension of the SI-mode result.
(define_insn "subvsi3_borrow"
  [(set (reg:CC_V CC_REGNUM)
        (compare:CC_V
         (minus:DI
          (minus:DI
           (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
           (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
          (match_operand:DI 4 "arm_borrow_operation" ""))
         (sign_extend:DI
          (minus:SI (minus:SI (match_dup 1) (match_dup 2))
                    (match_operand:SI 3 "arm_borrow_operation" "")))))
   (set (match_operand:SI 0 "s_register_operand" "=l,r")
        (minus:SI (minus:SI (match_dup 1) (match_dup 2))
                  (match_dup 3)))]
  "TARGET_32BIT"
  "sbcs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*")
   (set_attr "length" "2,4")]
)

;; As above with an immediate subtrahend (SBCS, or ADCS of the complement).
(define_insn "subvsi3_borrow_imm"
  [(set (reg:CC_V CC_REGNUM)
        (compare:CC_V
         (minus:DI
          (minus:DI
           (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
           (match_operand 2 "arm_adcimm_operand" "I,K"))
          (match_operand:DI 4 "arm_borrow_operation" ""))
         (sign_extend:DI
          (minus:SI (minus:SI (match_dup 1) (match_dup 2))
                    (match_operand:SI 3 "arm_borrow_operation" "")))))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
        (minus:SI (minus:SI (match_dup 1) (match_dup 2))
                  (match_dup 3)))]
  "TARGET_32BIT
   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
  "@
   sbcs%?\\t%0, %1, %2
   adcs%?\\t%0, %1, #%B2"
  [(set_attr "conds" "set")
   (set_attr "type" "alus_imm")]
)

;; FP subtract expanders; matched by the VFP patterns.
(define_expand "subsf3"
  [(set (match_operand:SF 0 "s_register_operand")
        (minus:SF (match_operand:SF 1 "s_register_operand")
                  (match_operand:SF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "
")

(define_expand "subdf3"
  [(set (match_operand:DF 0 "s_register_operand")
        (minus:DF (match_operand:DF 1 "s_register_operand")
                  (match_operand:DF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  "
")


;; Multiplication insns

;; HImode multiply: done as a 16x16->32 SMULBB with the low half taken.
(define_expand "mulhi3"
  [(set (match_operand:HI 0 "s_register_operand")
        (mult:HI (match_operand:HI 1 "s_register_operand")
                 (match_operand:HI 2 "s_register_operand")))]
  "TARGET_DSP_MULTIPLY"
  "
  {
    rtx result = gen_reg_rtx (SImode);
    emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
    emit_move_insn (operands[0], gen_lowpart (HImode, result));
    DONE;
  }"
)

(define_expand "mulsi3"
  [(set (match_operand:SI          0 "s_register_operand")
        (mult:SI (match_operand:SI 2 "s_register_operand")
                 (match_operand:SI 1 "s_register_operand")))]
  "TARGET_EITHER"
  ""
)

;; Use `&' and then `0' to prevent operands 0 and 2 being the same
(define_insn "*mul"
  [(set (match_operand:SI          0 "s_register_operand" "=l,r,&r,&r")
        (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
                 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
  "TARGET_32BIT"
  "mul%?\\t%0, %2, %1"
  [(set_attr "type" "mul")
   (set_attr "predicable" "yes")
   (set_attr "arch" "t2,v6,nov6,nov6")
   (set_attr "length" "4")
   (set_attr "predicable_short_it" "yes,no,*,*")]
)

;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
;; reusing the same register.
;; Multiply-accumulate: mla rd, rm, rs, ra.
(define_insn "*mla"
  [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
        (plus:SI
          (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
                   (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
          (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
  "TARGET_32BIT"
  "mla%?\\t%0, %3, %2, %1"
  [(set_attr "type" "mla")
   (set_attr "predicable" "yes")
   (set_attr "arch" "v6,nov6,nov6,nov6")]
)

;; Multiply-subtract: requires Thumb-2-capable architecture.
(define_insn "*mls"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (minus:SI
          (match_operand:SI 1 "s_register_operand" "r")
          (mult:SI (match_operand:SI 3 "s_register_operand" "r")
                   (match_operand:SI 2 "s_register_operand" "r"))))]
  "TARGET_32BIT && arm_arch_thumb2"
  "mls%?\\t%0, %3, %2, %1"
  [(set_attr "type" "mla")
   (set_attr "predicable" "yes")]
)

;; MULS patterns: pre-v6 forms need the earlyclobber/tied constraints;
;; the _v6 forms are used only when optimizing for size.
(define_insn "*mulsi3_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ (mult:SI
                        (match_operand:SI 2 "s_register_operand" "r,r")
                        (match_operand:SI 1 "s_register_operand" "%0,r"))
                       (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
        (mult:SI (match_dup 2) (match_dup 1)))]
  "TARGET_ARM && !arm_arch6"
  "muls%?\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "muls")]
)

(define_insn "*mulsi3_compare0_v6"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ (mult:SI
                        (match_operand:SI 2 "s_register_operand" "r")
                        (match_operand:SI 1 "s_register_operand" "r"))
                       (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
        (mult:SI (match_dup 2) (match_dup 1)))]
  "TARGET_ARM && arm_arch6 && optimize_size"
  "muls%?\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "muls")]
)

;; MULS where only the flags are wanted.
(define_insn "*mulsi_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ (mult:SI
                        (match_operand:SI 2 "s_register_operand" "r,r")
                        (match_operand:SI 1 "s_register_operand" "%0,r"))
                       (const_int 0)))
   (clobber (match_scratch:SI 0 "=&r,&r"))]
  "TARGET_ARM && !arm_arch6"
  "muls%?\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "muls")]
)

(define_insn "*mulsi_compare0_scratch_v6"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ (mult:SI
                        (match_operand:SI 2 "s_register_operand" "r")
                        (match_operand:SI 1 "s_register_operand" "r"))
                       (const_int 0)))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_ARM && arm_arch6 && optimize_size"
  "muls%?\\t%0, %2, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "muls")]
)

;; MLAS: multiply-accumulate that sets N and Z.
(define_insn "*mulsi3addsi_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ
         (plus:SI (mult:SI
                   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
                   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
                  (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
         (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
        (plus:SI (mult:SI (match_dup 2) (match_dup 1))
                 (match_dup 3)))]
  "TARGET_ARM && arm_arch6"
  "mlas%?\\t%0, %2, %1, %3"
  [(set_attr "conds" "set")
   (set_attr "type" "mlas")]
)

(define_insn "*mulsi3addsi_compare0_v6"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ
         (plus:SI (mult:SI
                   (match_operand:SI 2 "s_register_operand" "r")
                   (match_operand:SI 1 "s_register_operand" "r"))
                  (match_operand:SI 3 "s_register_operand" "r"))
         (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
        (plus:SI (mult:SI (match_dup 2) (match_dup 1))
                 (match_dup 3)))]
  "TARGET_ARM && arm_arch6 && optimize_size"
  "mlas%?\\t%0, %2, %1, %3"
  [(set_attr "conds" "set")
   (set_attr "type" "mlas")]
)

(define_insn "*mulsi3addsi_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ
         (plus:SI (mult:SI
                   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
                   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
                  (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
         (const_int 0)))
   (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
  "TARGET_ARM && !arm_arch6"
  "mlas%?\\t%0, %2, %1, %3"
  [(set_attr "conds" "set")
   (set_attr "type" "mlas")]
)

(define_insn "*mulsi3addsi_compare0_scratch_v6"
  [(set (reg:CC_NZ CC_REGNUM)
        (compare:CC_NZ
         (plus:SI (mult:SI
                   (match_operand:SI 2 "s_register_operand" "r")
                   (match_operand:SI 1 "s_register_operand" "r"))
                  (match_operand:SI 3 "s_register_operand" "r"))
         (const_int 0)))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_ARM && arm_arch6 && optimize_size"
  "mlas%?\\t%0, %2, %1, %3"
  [(set_attr "conds" "set")
   (set_attr "type" "mlas")]
)

;; 32x32->64 widening multiply.
;; The only difference between the v3-5 and v6+ versions is the requirement
;; that the output does not overlap with either input.

(define_expand "<Us>mulsidi3"
  [(set (match_operand:DI 0 "s_register_operand")
        (mult:DI
         (SE:DI (match_operand:SI 1 "s_register_operand"))
         (SE:DI (match_operand:SI 2 "s_register_operand"))))]
  "TARGET_32BIT"
  {
    emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
                             gen_highpart (SImode, operands[0]),
                             operands[1], operands[2]));
    DONE;
  }
)

;; UMULL/SMULL: operand 0 receives the low half, operand 1 the high half.
(define_insn "<US>mull"
  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
        (mult:SI
         (match_operand:SI 2 "s_register_operand" "%r,r")
         (match_operand:SI 3 "s_register_operand" "r,r")))
   (set (match_operand:SI 1 "s_register_operand" "=r,&r")
        (truncate:SI
         (lshiftrt:DI
          (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
          (const_int 32))))]
  "TARGET_32BIT"
  "<US>mull%?\\t%0, %1, %2, %3"
  [(set_attr "type" "umull")
   (set_attr "predicable" "yes")
   (set_attr "arch" "v6,nov6")]
)

(define_expand "<Us>maddsidi4"
  [(set (match_operand:DI 0 "s_register_operand")
        (plus:DI
         (mult:DI
          (SE:DI (match_operand:SI 1 "s_register_operand"))
          (SE:DI (match_operand:SI 2 "s_register_operand")))
         (match_operand:DI 3 "s_register_operand")))]
  "TARGET_32BIT"
  {
    emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
                             gen_lowpart (SImode, operands[3]),
                             gen_highpart (SImode, operands[0]),
                             gen_highpart (SImode, operands[3]),
                             operands[1], operands[2]));
    DONE;
  }
)

;; UMLAL/SMLAL: 64-bit accumulate; accumulator halves are tied to the
;; output halves ("0" and "2" constraints).
(define_insn "<US>mlal"
  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
        (plus:SI
         (mult:SI
          (match_operand:SI 4 "s_register_operand" "%r,r")
          (match_operand:SI 5 "s_register_operand" "r,r"))
         (match_operand:SI 1 "s_register_operand" "0,0")))
   (set (match_operand:SI 2 "s_register_operand" "=r,&r")
        (plus:SI
         (truncate:SI
          (lshiftrt:DI
           (plus:DI
            (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
            (zero_extend:DI (match_dup 1)))
           (const_int 32)))
         (match_operand:SI 3 "s_register_operand" "2,2")))]
  "TARGET_32BIT"
  "<US>mlal%?\\t%0, %2, %4, %5"
  [(set_attr "type" "umlal")
   (set_attr "predicable" "yes")
   (set_attr "arch" "v6,nov6")]
)

(define_expand "<US>mulsi3_highpart"
  [(parallel
    [(set (match_operand:SI 0 "s_register_operand")
          (truncate:SI
           (lshiftrt:DI
            (mult:DI
             (SE:DI (match_operand:SI 1 "s_register_operand"))
             (SE:DI (match_operand:SI 2 "s_register_operand")))
            (const_int 32))))
     (clobber (match_scratch:SI 3 ""))])]
  "TARGET_32BIT"
  ""
)

;; High part of a widening multiply; the low half goes to a scratch.
(define_insn "*<US>mull_high"
  [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
        (truncate:SI
         (lshiftrt:DI
          (mult:DI
           (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
           (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
          (const_int 32))))
   (clobber (match_scratch:SI 3 "=r,&r,&r"))]
  "TARGET_32BIT"
  "<US>mull%?\\t%3, %0, %2, %1"
  [(set_attr "type" "umull")
   (set_attr "predicable" "yes")
   (set_attr "arch" "v6,nov6,nov6")]
)

;; 16x16->32 signed multiplies (DSP extension): the B/T suffix selects the
;; bottom/top half of each source operand.
(define_insn "mulhisi3"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (mult:SI (sign_extend:SI
                  (match_operand:HI 1 "s_register_operand" "%r"))
                 (sign_extend:SI
                  (match_operand:HI 2 "s_register_operand" "r"))))]
  "TARGET_DSP_MULTIPLY"
  "smulbb%?\\t%0, %1, %2"
  [(set_attr "type" "smulxy")
   (set_attr "predicable" "yes")]
)

(define_insn "*mulhisi3tb"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (mult:SI (ashiftrt:SI
                  (match_operand:SI 1 "s_register_operand" "r")
                  (const_int 16))
                 (sign_extend:SI
                  (match_operand:HI 2 "s_register_operand" "r"))))]
  "TARGET_DSP_MULTIPLY"
  "smultb%?\\t%0, %1, %2"
  [(set_attr "type" "smulxy")
   (set_attr "predicable" "yes")]
)

(define_insn "*mulhisi3bt"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (mult:SI (sign_extend:SI
                  (match_operand:HI 1 "s_register_operand" "r"))
                 (ashiftrt:SI
                  (match_operand:SI 2 "s_register_operand" "r")
                  (const_int 16))))]
  "TARGET_DSP_MULTIPLY"
  "smulbt%?\\t%0, %1, %2"
  [(set_attr "type" "smulxy")
   (set_attr "predicable" "yes")]
)

(define_insn "*mulhisi3tt"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (mult:SI (ashiftrt:SI
                  (match_operand:SI 1 "s_register_operand" "r")
                  (const_int 16))
                 (ashiftrt:SI
                  (match_operand:SI 2 "s_register_operand" "r")
                  (const_int 16))))]
  "TARGET_DSP_MULTIPLY"
  "smultt%?\\t%0, %1, %2"
  [(set_attr "type" "smulxy")
   (set_attr "predicable" "yes")]
)

(define_expand "maddhisi4"
  [(set (match_operand:SI 0 "s_register_operand")
        (plus:SI (mult:SI (sign_extend:SI
                           (match_operand:HI 1 "s_register_operand"))
                          (sign_extend:SI
                           (match_operand:HI 2 "s_register_operand")))
                 (match_operand:SI 3 "s_register_operand")))]
  "TARGET_DSP_MULTIPLY"
  {
    /* If this function reads the Q bit from ACLE intrinsics break up the
       multiplication and accumulation as an overflow during accumulation will
       clobber the Q flag.  */
    if (ARM_Q_BIT_READ)
      {
        rtx tmp = gen_reg_rtx (SImode);
        emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
        emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
        DONE;
      }
  }
)

;; SMLABB: only usable when the Q bit is not read (it may set Q on overflow).
(define_insn "*arm_maddhisi4"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (plus:SI (mult:SI (sign_extend:SI
                           (match_operand:HI 1 "s_register_operand" "r"))
                          (sign_extend:SI
                           (match_operand:HI 2 "s_register_operand" "r")))
                 (match_operand:SI 3 "s_register_operand" "r")))]
  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
  "smlabb%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)

;; SMLABB variant that explicitly models the Q-bit update.
(define_insn "arm_smlabb_setq"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (plus:SI (mult:SI (sign_extend:SI
                           (match_operand:HI 1 "s_register_operand" "r"))
                          (sign_extend:SI
                           (match_operand:HI 2 "s_register_operand" "r")))
                 (match_operand:SI 3 "s_register_operand" "r")))
   (set (reg:CC APSRQ_REGNUM)
        (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
  "TARGET_DSP_MULTIPLY"
  "smlabb%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)

(define_expand "arm_smlabb"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "s_register_operand")
   (match_operand:SI 2 "s_register_operand")
   (match_operand:SI 3 "s_register_operand")]
  "TARGET_DSP_MULTIPLY"
  {
    rtx mult1 = gen_lowpart (HImode, operands[1]);
    rtx mult2 = gen_lowpart (HImode, operands[2]);
    if (ARM_Q_BIT_READ)
      emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
    else
      emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
    DONE;
  }
)

;; Note: there is no maddhisi4ibt because this one is canonical form
(define_insn "maddhisi4tb"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (plus:SI (mult:SI (ashiftrt:SI
                           (match_operand:SI 1 "s_register_operand" "r")
                           (const_int 16))
                          (sign_extend:SI
                           (match_operand:HI 2 "s_register_operand" "r")))
                 (match_operand:SI 3 "s_register_operand" "r")))]
  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
  "smlatb%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)

(define_insn "arm_smlatb_setq"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (plus:SI (mult:SI (ashiftrt:SI
                           (match_operand:SI 1 "s_register_operand" "r")
                           (const_int 16))
                          (sign_extend:SI
                           (match_operand:HI 2 "s_register_operand" "r")))
                 (match_operand:SI 3 "s_register_operand" "r")))
   (set (reg:CC APSRQ_REGNUM)
        (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
  "TARGET_DSP_MULTIPLY"
  "smlatb%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)

(define_expand "arm_smlatb"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "s_register_operand")
   (match_operand:SI 2 "s_register_operand")
   (match_operand:SI 3 "s_register_operand")]
  "TARGET_DSP_MULTIPLY"
  {
    rtx mult2 = gen_lowpart (HImode, operands[2]);
    if (ARM_Q_BIT_READ)
      emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
                                      mult2, operands[3]));
    else
      emit_insn (gen_maddhisi4tb (operands[0], operands[1],
                                  mult2, operands[3]));
    DONE;
  }
)

(define_insn "maddhisi4tt"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (plus:SI (mult:SI (ashiftrt:SI
                           (match_operand:SI 1 "s_register_operand" "r")
                           (const_int 16))
                          (ashiftrt:SI
                           (match_operand:SI 2 "s_register_operand" "r")
                           (const_int 16)))
                 (match_operand:SI 3 "s_register_operand" "r")))]
  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
  "smlatt%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)

(define_insn "arm_smlatt_setq"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (plus:SI (mult:SI (ashiftrt:SI
                           (match_operand:SI 1 "s_register_operand" "r")
                           (const_int 16))
                          (ashiftrt:SI
                           (match_operand:SI 2 "s_register_operand" "r")
                           (const_int 16)))
                 (match_operand:SI 3 "s_register_operand" "r")))
   (set (reg:CC APSRQ_REGNUM)
        (unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
  "TARGET_DSP_MULTIPLY"
  "smlatt%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)

(define_expand "arm_smlatt"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "s_register_operand")
   (match_operand:SI 2 "s_register_operand")
   (match_operand:SI 3 "s_register_operand")]
  "TARGET_DSP_MULTIPLY"
  {
    if (ARM_Q_BIT_READ)
      emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
                                      operands[2], operands[3]));
    else
      emit_insn (gen_maddhisi4tt (operands[0], operands[1],
                                  operands[2], operands[3]));
    DONE;
  }
)

;; SMLALBB: 16x16 multiply accumulated into a 64-bit register pair.
(define_insn "maddhidi4"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (plus:DI
         (mult:DI (sign_extend:DI
                   (match_operand:HI 1 "s_register_operand" "r"))
                  (sign_extend:DI
                   (match_operand:HI 2 "s_register_operand" "r")))
         (match_operand:DI 3 "s_register_operand" "0")))]
  "TARGET_DSP_MULTIPLY"
  "smlalbb%?\\t%Q0, %R0, %1, %2"
  [(set_attr "type" "smlalxy")
   (set_attr "predicable" "yes")])

;; Note: there is no maddhidi4ibt because this one is canonical form
(define_insn "*maddhidi4tb"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (plus:DI
         (mult:DI (sign_extend:DI
                   (ashiftrt:SI
                    (match_operand:SI 1 "s_register_operand" "r")
                    (const_int 16)))
                  (sign_extend:DI
                   (match_operand:HI 2 "s_register_operand" "r")))
         (match_operand:DI 3 "s_register_operand" "0")))]
  "TARGET_DSP_MULTIPLY"
  "smlaltb%?\\t%Q0, %R0, %1, %2"
  [(set_attr "type" "smlalxy")
   (set_attr "predicable" "yes")])

(define_insn "*maddhidi4tt"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
        (plus:DI
         (mult:DI (sign_extend:DI
                   (ashiftrt:SI
                    (match_operand:SI 1 "s_register_operand" "r")
                    (const_int 16)))
                  (sign_extend:DI
                   (ashiftrt:SI
                    (match_operand:SI 2 "s_register_operand" "r")
                    (const_int 16))))
         (match_operand:DI 3 "s_register_operand" "0")))]
  "TARGET_DSP_MULTIPLY"
  "smlaltt%?\\t%Q0, %R0, %1, %2"
  [(set_attr "type" "smlalxy")
   (set_attr "predicable" "yes")])

;; SMLAWB/SMLAWT via iterator; the _setq variant additionally models the
;; Q-bit update (selected through <add_clobber_q_name>/<add_clobber_q_pred>).
(define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (unspec:SI
         [(match_operand:SI 1 "s_register_operand" "r")
          (match_operand:SI 2 "s_register_operand" "r")
          (match_operand:SI 3 "s_register_operand" "r")]
         SMLAWBT))]
  "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
  "<smlaw_op>%?\\t%0, %1, %2, %3"
  [(set_attr "type" "smlaxy")
   (set_attr "predicable" "yes")]
)

(define_expand "arm_<smlaw_op>"
  [(set (match_operand:SI 0 "s_register_operand")
        (unspec:SI
         [(match_operand:SI 1 "s_register_operand")
          (match_operand:SI 2 "s_register_operand")
          (match_operand:SI 3 "s_register_operand")]
         SMLAWBT))]
  "TARGET_DSP_MULTIPLY"
  {
    if (ARM_Q_BIT_READ)
      emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
                                               operands[2], operands[3]));
    else
      emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
                                          operands[2], operands[3]));
    DONE;
  }
)

;; FP multiply expanders; matched by the VFP patterns.
(define_expand "mulsf3"
  [(set (match_operand:SF 0 "s_register_operand")
        (mult:SF (match_operand:SF 1 "s_register_operand")
                 (match_operand:SF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "
")

(define_expand "muldf3"
  [(set (match_operand:DF 0 "s_register_operand")
        (mult:DF (match_operand:DF 1 "s_register_operand")
                 (match_operand:DF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  "
")

;; Division insns

(define_expand "divsf3"
  [(set (match_operand:SF 0 "s_register_operand")
        (div:SF (match_operand:SF 1 "s_register_operand")
                (match_operand:SF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "")

(define_expand "divdf3"
  [(set (match_operand:DF 0 "s_register_operand")
        (div:DF (match_operand:DF 1 "s_register_operand")
                (match_operand:DF 2 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
  "")


; Expand logical operations.  The mid-end expander does not split off memory
; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
; So an explicit expander is needed to generate better code.
2831 2832(define_expand "<LOGICAL:optab>di3" 2833 [(set (match_operand:DI 0 "s_register_operand") 2834 (LOGICAL:DI (match_operand:DI 1 "s_register_operand") 2835 (match_operand:DI 2 "arm_<optab>di_operand")))] 2836 "TARGET_32BIT" 2837 { 2838 rtx low = simplify_gen_binary (<CODE>, SImode, 2839 gen_lowpart (SImode, operands[1]), 2840 gen_lowpart (SImode, operands[2])); 2841 rtx high = simplify_gen_binary (<CODE>, SImode, 2842 gen_highpart (SImode, operands[1]), 2843 gen_highpart_mode (SImode, DImode, 2844 operands[2])); 2845 2846 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low)); 2847 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high)); 2848 DONE; 2849 } 2850) 2851 2852(define_expand "one_cmpldi2" 2853 [(set (match_operand:DI 0 "s_register_operand") 2854 (not:DI (match_operand:DI 1 "s_register_operand")))] 2855 "TARGET_32BIT" 2856 { 2857 rtx low = simplify_gen_unary (NOT, SImode, 2858 gen_lowpart (SImode, operands[1]), 2859 SImode); 2860 rtx high = simplify_gen_unary (NOT, SImode, 2861 gen_highpart_mode (SImode, DImode, 2862 operands[1]), 2863 SImode); 2864 2865 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low)); 2866 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high)); 2867 DONE; 2868 } 2869) 2870 2871;; Split DImode and, ior, xor operations. Simply perform the logical 2872;; operation on the upper and lower halves of the registers. 2873;; This is needed for atomic operations in arm_split_atomic_op. 2874;; Avoid splitting IWMMXT instructions. 2875(define_split 2876 [(set (match_operand:DI 0 "s_register_operand" "") 2877 (match_operator:DI 6 "logical_binary_operator" 2878 [(match_operand:DI 1 "s_register_operand" "") 2879 (match_operand:DI 2 "s_register_operand" "")]))] 2880 "TARGET_32BIT && reload_completed 2881 && ! 
IS_IWMMXT_REGNUM (REGNO (operands[0]))"
  [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
   (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
  "
  {
    /* Rewrite the DImode operands as their SImode halves.  */
    operands[3] = gen_highpart (SImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[4] = gen_highpart (SImode, operands[1]);
    operands[1] = gen_lowpart (SImode, operands[1]);
    operands[5] = gen_highpart (SImode, operands[2]);
    operands[2] = gen_lowpart (SImode, operands[2]);
  }"
)

;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
;; Unconditionally split since there is no SIMD DImode NOT pattern.
(define_split
  [(set (match_operand:DI 0 "s_register_operand")
	(not:DI (match_operand:DI 1 "s_register_operand")))]
  "TARGET_32BIT"
  [(set (match_dup 0) (not:SI (match_dup 1)))
   (set (match_dup 2) (not:SI (match_dup 3)))]
  "
  {
    operands[2] = gen_highpart (SImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[3] = gen_highpart (SImode, operands[1]);
    operands[1] = gen_lowpart (SImode, operands[1]);
  }"
)

;; 32-bit AND expander.  Special-cases an AND with 255 on ARMv6+ (a
;; zero-extend of the low byte); otherwise either keeps an awkward
;; immediate for a later split or splits the constant now.
(define_expand "andsi3"
  [(set (match_operand:SI 0 "s_register_operand")
	(and:SI (match_operand:SI 1 "s_register_operand")
		(match_operand:SI 2 "reg_or_int_operand")))]
  "TARGET_EITHER"
  "
  if (TARGET_32BIT)
    {
      if (CONST_INT_P (operands[2]))
	{
	  if (INTVAL (operands[2]) == 255 && arm_arch6)
	    {
	      operands[1] = convert_to_mode (QImode, operands[1], 1);
	      emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
							 operands[1]));
	      DONE;
	    }
	  else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
	    operands[2] = force_reg (SImode, operands[2]);
	  else
	    {
	      arm_split_constant (AND, SImode, NULL_RTX,
				  INTVAL (operands[2]), operands[0],
				  operands[1],
				  optimize && can_create_pseudo_p ());
	      DONE;
	    }
	}
    }
  else /* TARGET_THUMB1 */
    {
      if (!CONST_INT_P (operands[2]))
	{
	  rtx tmp = force_reg (SImode, operands[2]);
	  if (rtx_equal_p (operands[0], operands[1]))
	    operands[2] = tmp;
	  else
	    {
	      operands[2] = operands[1];
	      operands[1] = tmp;
	    }
	}
      else
	{
	  int i;

	  /* ~imm fits in a byte: use BIC of the inverted constant.  */
	  if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
	    {
	      operands[2] = force_reg (SImode,
				       GEN_INT (~INTVAL (operands[2])));

	      emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));

	      DONE;
	    }

	  /* Look for a mask of 2**i - 1 (or its complement): those can be
	     done with an extract, or with a shift pair.  */
	  for (i = 9; i <= 31; i++)
	    {
	      if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
		{
		  emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
					const0_rtx));
		  DONE;
		}
	      else if ((HOST_WIDE_INT_1 << i) - 1
		       == ~INTVAL (operands[2]))
		{
		  rtx shift = GEN_INT (i);
		  rtx reg = gen_reg_rtx (SImode);

		  emit_insn (gen_lshrsi3 (reg, operands[1], shift));
		  emit_insn (gen_ashlsi3 (operands[0], reg, shift));

		  DONE;
		}
	    }

	  operands[2] = force_reg (SImode, operands[2]);
	}
    }
  "
)

; ???
; Check split length for Thumb-2
(define_insn_and_split "*arm_andsi3_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
	(and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
		(match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
  "TARGET_32BIT"
  "@
   and%?\\t%0, %1, %2
   and%?\\t%0, %1, %2
   bic%?\\t%0, %1, #%B2
   and%?\\t%0, %1, %2
   #"
  "TARGET_32BIT
   && CONST_INT_P (operands[2])
   && !(const_ok_for_arm (INTVAL (operands[2]))
	|| const_ok_for_arm (~INTVAL (operands[2])))"
  [(clobber (const_int 0))]
  "
  arm_split_constant (AND, SImode, curr_insn,
		      INTVAL (operands[2]), operands[0], operands[1], 0);
  DONE;
  "
  [(set_attr "length" "4,4,4,4,16")
   (set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no,yes,no,no,no")
   (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
)

;; AND that also sets the N/Z condition flags from the result.
(define_insn "*andsi3_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
		 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
	 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(and:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "@
   ands%?\\t%0, %1, %2
   bics%?\\t%0, %1, #%B2
   ands%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "logics_imm,logics_imm,logics_reg")]
)

;; Flag-setting AND whose result is discarded: TST where possible,
;; BICS into a scratch for the inverted-immediate alternative.
(define_insn "*andsi3_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
		 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
	 (const_int 0)))
   (clobber (match_scratch:SI 2 "=X,r,X"))]
  "TARGET_32BIT"
  "@
   tst%?\\t%0, %1
   bics%?\\t%2, %0, #%B1
   tst%?\\t%0, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "logics_imm,logics_imm,logics_reg")]
)

(define_insn
"*zeroextractsi_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ (zero_extract:SI
			(match_operand:SI 0 "s_register_operand" "r")
			(match_operand 1 "const_int_operand" "n")
			(match_operand 2 "const_int_operand" "n"))
		       (const_int 0)))]
  "TARGET_32BIT
  && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
      && INTVAL (operands[1]) > 0
      && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
      && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
  "*
  /* Turn width/position into the equivalent immediate mask and TST.  */
  operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
			 << INTVAL (operands[2]));
  output_asm_insn (\"tst%?\\t%0, %1\", operands);
  return \"\";
  "
  [(set_attr "conds" "set")
   (set_attr "predicable" "yes")
   (set_attr "type" "logics_imm")]
)

;; (bit-field != 0) as a value: split into flag-setting AND with a mask
;; followed by a conditional move of 1 into the destination.
(define_insn_and_split "*ne_zeroextractsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(ne:SI (zero_extract:SI
		(match_operand:SI 1 "s_register_operand" "r")
		(match_operand:SI 2 "const_int_operand" "n")
		(match_operand:SI 3 "const_int_operand" "n"))
	       (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT
   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
       && INTVAL (operands[2]) > 0
       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
  "#"
  "TARGET_32BIT
   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
       && INTVAL (operands[2]) > 0
       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
  [(parallel [(set (reg:CC_NZ CC_REGNUM)
		   (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
				  (const_int 0)))
	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
   (set (match_dup 0)
	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
			 (match_dup 0) (const_int 1)))]
  "
  operands[2] = GEN_INT (((1 << INTVAL
(operands[2])) - 1)
			 << INTVAL (operands[3]));
  "
  [(set_attr "conds" "clob")
   (set (attr "length")
	(if_then_else (eq_attr "is_thumb" "yes")
		      (const_int 12)
		      (const_int 8)))
   (set_attr "type" "multiple")]
)

;; As above, but for a field starting at bit 0: test it by shifting it
;; to the top of the register instead of masking.
(define_insn_and_split "*ne_zeroextractsi_shifted"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(ne:SI (zero_extract:SI
		(match_operand:SI 1 "s_register_operand" "r")
		(match_operand:SI 2 "const_int_operand" "n")
		(const_int 0))
	       (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  "TARGET_ARM"
  [(parallel [(set (reg:CC_NZ CC_REGNUM)
		   (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
				  (const_int 0)))
	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
   (set (match_dup 0)
	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
			 (match_dup 0) (const_int 1)))]
  "
  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "8")
   (set_attr "type" "multiple")]
)

;; if_then_else form: yield operand 4 when the bit-field is non-zero,
;; else 0.  Destination must not overlap the true-value operand.
(define_insn_and_split "*ite_ne_zeroextractsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(if_then_else:SI (ne (zero_extract:SI
			      (match_operand:SI 1 "s_register_operand" "r")
			      (match_operand:SI 2 "const_int_operand" "n")
			      (match_operand:SI 3 "const_int_operand" "n"))
			     (const_int 0))
			 (match_operand:SI 4 "arm_not_operand" "rIK")
			 (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM
   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
       && INTVAL (operands[2]) > 0
       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
   && !reg_overlap_mentioned_p (operands[0], operands[4])"
  "#"
  "TARGET_ARM
   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
       && INTVAL (operands[2]) > 0
       && INTVAL (operands[2]) + (INTVAL
(operands[3]) & 1) <= 8
       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
   && !reg_overlap_mentioned_p (operands[0], operands[4])"
  [(parallel [(set (reg:CC_NZ CC_REGNUM)
		   (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
				  (const_int 0)))
	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
   (set (match_dup 0)
	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
			 (match_dup 0) (match_dup 4)))]
  "
  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
			 << INTVAL (operands[3]));
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "8")
   (set_attr "type" "multiple")]
)

;; Same if_then_else form, for a bit-field starting at bit 0 (tested by
;; shifting it to the top of the register).
(define_insn_and_split "*ite_ne_zeroextractsi_shifted"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(if_then_else:SI (ne (zero_extract:SI
			      (match_operand:SI 1 "s_register_operand" "r")
			      (match_operand:SI 2 "const_int_operand" "n")
			      (const_int 0))
			     (const_int 0))
			 (match_operand:SI 3 "arm_not_operand" "rIK")
			 (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
  "#"
  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
  [(parallel [(set (reg:CC_NZ CC_REGNUM)
		   (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
				  (const_int 0)))
	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
   (set (match_dup 0)
	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
			 (match_dup 0) (match_dup 3)))]
  "
  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "8")
   (set_attr "type" "multiple")]
)

;; ??? Use the Thumb-2 bitfield insert/extract instructions here.
;; Rewrite a zero_extract feeding a shiftable operator as a left shift
;; (to isolate the field at the top) plus a logical right shift.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(match_operator:SI 1 "shiftable_operator"
	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
			   (match_operand:SI 3 "const_int_operand" "")
			   (match_operand:SI 4 "const_int_operand" ""))
	  (match_operand:SI 5 "s_register_operand" "")]))
   (clobber (match_operand:SI 6 "s_register_operand" ""))]
  "TARGET_ARM"
  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
   (set (match_dup 0)
	(match_op_dup 1
	 [(lshiftrt:SI (match_dup 6) (match_dup 4))
	  (match_dup 5)]))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[3]);

     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
     operands[4] = GEN_INT (32 - temp);
   }"
)

;; Same, but for sign_extract: the final shift is arithmetic so the
;; field is sign-extended.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(match_operator:SI 1 "shiftable_operator"
	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
			   (match_operand:SI 3 "const_int_operand" "")
			   (match_operand:SI 4 "const_int_operand" ""))
	  (match_operand:SI 5 "s_register_operand" "")]))
   (clobber (match_operand:SI 6 "s_register_operand" ""))]
  "TARGET_ARM"
  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
   (set (match_dup 0)
	(match_op_dup 1
	 [(ashiftrt:SI (match_dup 6) (match_dup 4))
	  (match_dup 5)]))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[3]);

     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
     operands[4] = GEN_INT (32 - temp);
   }"
)

;;; ??? This pattern is bogus.  If operand3 has bits outside the range
;;; represented by the bitfield, then this will produce incorrect results.
;;; Somewhere, the value needs to be truncated.  On targets like the m68k,
;;; which have a real bit-field insert instruction, the truncation happens
;;; in the bit-field insert instruction itself.
;;; Since arm does not have a
;;; bit-field insert instruction, we would have to emit code here to truncate
;;; the value before we insert.  This loses some of the advantage of having
;;; this insv pattern, so this pattern needs to be reevaluated.

;; Insert operand 3 into a bit-field of operand 0 (width operand 1,
;; starting bit operand 2).  Uses Thumb-2 BFC/BFI or unaligned stores
;; where available, otherwise open-codes with mask/shift/or sequences.
(define_expand "insv"
  [(set (zero_extract (match_operand 0 "nonimmediate_operand")
		      (match_operand 1 "general_operand")
		      (match_operand 2 "general_operand"))
	(match_operand 3 "reg_or_int_operand"))]
  "TARGET_ARM || arm_arch_thumb2"
  "
  {
    int start_bit = INTVAL (operands[2]);
    int width = INTVAL (operands[1]);
    HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
    rtx target, subtarget;

    if (arm_arch_thumb2)
      {
       /* Byte-aligned 16/32-bit field in memory: use an unaligned store.  */
       if (unaligned_access && MEM_P (operands[0])
	   && s_register_operand (operands[3], GET_MODE (operands[3]))
	   && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
	 {
	   rtx base_addr;

	   if (BYTES_BIG_ENDIAN)
	     start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
			 - start_bit;

	   if (width == 32)
	     {
	       base_addr = adjust_address (operands[0], SImode,
					   start_bit / BITS_PER_UNIT);
	       emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
	     }
	   else
	     {
	       rtx tmp = gen_reg_rtx (HImode);

	       base_addr = adjust_address (operands[0], HImode,
					   start_bit / BITS_PER_UNIT);
	       emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
	       emit_insn (gen_unaligned_storehi (base_addr, tmp));
	     }
	   DONE;
	 }
       else if (s_register_operand (operands[0], GET_MODE (operands[0])))
	 {
	   bool use_bfi = TRUE;

	   if (CONST_INT_P (operands[3]))
	     {
	       HOST_WIDE_INT val = INTVAL (operands[3]) & mask;

	       /* Inserting zero is just a bit-field clear (BFC).  */
	       if (val == 0)
		 {
		   emit_insn (gen_insv_zero (operands[0], operands[1],
					     operands[2]));
		   DONE;
		 }

	       /* See if the set can be done with a single orr instruction.
*/
	       if (val == mask && const_ok_for_arm (val << start_bit))
		 use_bfi = FALSE;
	     }

	   if (use_bfi)
	     {
	       if (!REG_P (operands[3]))
		 operands[3] = force_reg (SImode, operands[3]);

	       emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
				       operands[3]));
	       DONE;
	     }
	 }
       else
	 FAIL;
      }

    if (!s_register_operand (operands[0], GET_MODE (operands[0])))
      FAIL;

    target = copy_rtx (operands[0]);
    /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
       subreg as the final target.  */
    if (GET_CODE (target) == SUBREG)
      {
	subtarget = gen_reg_rtx (SImode);
	if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
	    < GET_MODE_SIZE (SImode))
	  target = SUBREG_REG (target);
      }
    else
      subtarget = target;

    if (CONST_INT_P (operands[3]))
      {
	/* Since we are inserting a known constant, we may be able to
	   reduce the number of bits that we have to clear so that
	   the mask becomes simple.  */
	/* ??? This code does not check to see if the new mask is actually
	   simpler.  It may not be.  */
	rtx op1 = gen_reg_rtx (SImode);
	/* ??? Truncate operand3 to fit in the bitfield.  See comment before
	   start of this pattern.  */
	HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
	HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);

	emit_insn (gen_andsi3 (op1, operands[0],
			       gen_int_mode (~mask2, SImode)));
	emit_insn (gen_iorsi3 (subtarget, op1,
			       gen_int_mode (op3_value << start_bit, SImode)));
      }
    else if (start_bit == 0
	     && !(const_ok_for_arm (mask)
		  || const_ok_for_arm (~mask)))
      {
	/* A trick: since we are setting the bottom bits in the word,
	   we can shift operand[3] up, operand[0] down, OR them together
	   and rotate the result back again.  This takes 3 insns, and
	   the third might be mergeable into another op.
*/
	/* The shift up copes with the possibility that operand[3] is
	   wider than the bitfield.  */
	rtx op0 = gen_reg_rtx (SImode);
	rtx op1 = gen_reg_rtx (SImode);

	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
	emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
	emit_insn (gen_iorsi3 (op1, op1, op0));
	emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
      }
    else if ((width + start_bit == 32)
	     && !(const_ok_for_arm (mask)
		  || const_ok_for_arm (~mask)))
      {
	/* Similar trick, but slightly less efficient.  */

	rtx op0 = gen_reg_rtx (SImode);
	rtx op1 = gen_reg_rtx (SImode);

	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
	emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
	emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
	emit_insn (gen_iorsi3 (subtarget, op1, op0));
      }
    else
      {
	/* General case: clear the field with AND-NOT, then OR in the
	   masked, shifted value.  */
	rtx op0 = gen_int_mode (mask, SImode);
	rtx op1 = gen_reg_rtx (SImode);
	rtx op2 = gen_reg_rtx (SImode);

	if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
	  {
	    rtx tmp = gen_reg_rtx (SImode);

	    emit_insn (gen_movsi (tmp, op0));
	    op0 = tmp;
	  }

	/* Mask out any bits in operand[3] that are not needed.
*/
	emit_insn (gen_andsi3 (op1, operands[3], op0));

	if (CONST_INT_P (op0)
	    && (const_ok_for_arm (mask << start_bit)
		|| const_ok_for_arm (~(mask << start_bit))))
	  {
	    op0 = gen_int_mode (~(mask << start_bit), SImode);
	    emit_insn (gen_andsi3 (op2, operands[0], op0));
	  }
	else
	  {
	    if (CONST_INT_P (op0))
	      {
		rtx tmp = gen_reg_rtx (SImode);

		emit_insn (gen_movsi (tmp, op0));
		op0 = tmp;
	      }

	    if (start_bit != 0)
	      emit_insn (gen_ashlsi3 (op0, op0, operands[2]));

	    emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
	  }

	if (start_bit != 0)
	  emit_insn (gen_ashlsi3 (op1, op1, operands[2]));

	emit_insn (gen_iorsi3 (subtarget, op1, op2));
      }

    if (subtarget != target)
      {
	/* If TARGET is still a SUBREG, then it must be wider than a word,
	   so we must be careful only to set the subword we were asked to.  */
	if (GET_CODE (target) == SUBREG)
	  emit_move_insn (target, subtarget);
	else
	  emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
      }

    DONE;
  }"
)

;; Clear a bit-field to zero: BFC on Thumb-2.
(define_insn "insv_zero"
  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
			 (match_operand:SI 1 "const_int_M_operand" "M")
			 (match_operand:SI 2 "const_int_M_operand" "M"))
	(const_int 0))]
  "arm_arch_thumb2"
  "bfc%?\t%0, %2, %1"
  [(set_attr "length" "4")
   (set_attr "predicable" "yes")
   (set_attr "type" "bfm")]
)

;; Insert a register into a bit-field: BFI on Thumb-2.
(define_insn "insv_t2"
  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
			 (match_operand:SI 1 "const_int_M_operand" "M")
			 (match_operand:SI 2 "const_int_M_operand" "M"))
	(match_operand:SI 3 "s_register_operand" "r"))]
  "arm_arch_thumb2"
  "bfi%?\t%0, %3, %2, %1"
  [(set_attr "length" "4")
   (set_attr "predicable" "yes")
   (set_attr "type" "bfm")]
)

(define_insn "andsi_notsi_si"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
		(match_operand:SI 1 "s_register_operand" "r")))]
  "TARGET_32BIT"
  "bic%?\\t%0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "type" "logic_reg")]
)

;; BIC with a shifted second operand.
(define_insn "andsi_not_shiftsi_si"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(and:SI (not:SI (match_operator:SI 4 "shift_operator"
			 [(match_operand:SI 2 "s_register_operand" "r,r")
			  (match_operand:SI 3 "shift_amount_operand" "M,r")]))
		(match_operand:SI 1 "s_register_operand" "r,r")))]
  "TARGET_32BIT"
  "bic%?\\t%0, %1, %2%S4"
  [(set_attr "predicable" "yes")
   (set_attr "shift" "2")
   (set_attr "arch" "32,a")
   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
)

;; Shifted bics pattern used to set up CC status register and not reusing
;; bics output.  Pattern restricts Thumb2 shift operand as bics for Thumb2
;; does not support shift by register.
(define_insn "andsi_not_shiftsi_si_scc_no_reuse"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
			  [(match_operand:SI 1 "s_register_operand" "r,r")
			   (match_operand:SI 2 "shift_amount_operand" "M,r")]))
		 (match_operand:SI 3 "s_register_operand" "r,r"))
	 (const_int 0)))
   (clobber (match_scratch:SI 4 "=r,r"))]
  "TARGET_32BIT"
  "bics%?\\t%4, %3, %1%S0"
  [(set_attr "predicable" "yes")
   (set_attr "arch" "32,a")
   (set_attr "conds" "set")
   (set_attr "shift" "1")
   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
)

;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
;; getting reused later.
(define_insn "andsi_not_shiftsi_si_scc"
  [(parallel [(set (reg:CC_NZ CC_REGNUM)
		   (compare:CC_NZ
		    (and:SI (not:SI (match_operator:SI 0 "shift_operator"
				     [(match_operand:SI 1 "s_register_operand" "r,r")
				      (match_operand:SI 2 "shift_amount_operand" "M,r")]))
			    (match_operand:SI 3 "s_register_operand" "r,r"))
		    (const_int 0)))
	      (set (match_operand:SI 4 "s_register_operand" "=r,r")
		   (and:SI (not:SI (match_op_dup 0
				    [(match_dup 1)
				     (match_dup 2)]))
			   (match_dup 3)))])]
  "TARGET_32BIT"
  "bics%?\\t%4, %3, %1%S0"
  [(set_attr "predicable" "yes")
   (set_attr "arch" "32,a")
   (set_attr "conds" "set")
   (set_attr "shift" "1")
   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
)

;; BICS: flag-setting AND-NOT whose result is kept.
(define_insn "*andsi_notsi_si_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
		 (match_operand:SI 1 "s_register_operand" "r"))
	 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r")
	(and:SI (not:SI (match_dup 2)) (match_dup 1)))]
  "TARGET_32BIT"
  "bics\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "logics_shift_reg")]
)

;; BICS for the flags only; the AND-NOT result goes to a scratch.
(define_insn "*andsi_notsi_si_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
		 (match_operand:SI 1 "s_register_operand" "r"))
	 (const_int 0)))
   (clobber (match_scratch:SI 0 "=r"))]
  "TARGET_32BIT"
  "bics\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "logics_shift_reg")]
)

;; 32-bit inclusive-OR expander; non-encodable immediates are either kept
;; for a later split or expanded into an instruction sequence now.
(define_expand "iorsi3"
  [(set (match_operand:SI 0 "s_register_operand")
	(ior:SI (match_operand:SI 1 "s_register_operand")
		(match_operand:SI 2 "reg_or_int_operand")))]
  "TARGET_EITHER"
  "
  if (CONST_INT_P (operands[2]))
    {
      if (TARGET_32BIT)
	{
	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]),
IOR))
	    operands[2] = force_reg (SImode, operands[2]);
	  else
	    {
	      arm_split_constant (IOR, SImode, NULL_RTX,
				  INTVAL (operands[2]), operands[0],
				  operands[1],
				  optimize && can_create_pseudo_p ());
	      DONE;
	    }
	}
      else /* TARGET_THUMB1 */
	{
	  rtx tmp = force_reg (SImode, operands[2]);
	  if (rtx_equal_p (operands[0], operands[1]))
	    operands[2] = tmp;
	  else
	    {
	      operands[2] = operands[1];
	      operands[1] = tmp;
	    }
	}
    }
  "
)

(define_insn_and_split "*iorsi3_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
	(ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
		(match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
  "TARGET_32BIT"
  "@
   orr%?\\t%0, %1, %2
   orr%?\\t%0, %1, %2
   orn%?\\t%0, %1, #%B2
   orr%?\\t%0, %1, %2
   #"
  "TARGET_32BIT
   && CONST_INT_P (operands[2])
   && !(const_ok_for_arm (INTVAL (operands[2]))
	|| (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
  [(clobber (const_int 0))]
{
  arm_split_constant (IOR, SImode, curr_insn,
		      INTVAL (operands[2]), operands[0], operands[1], 0);
  DONE;
}
  [(set_attr "length" "4,4,4,4,16")
   (set_attr "arch" "32,t2,t2,32,32")
   (set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no,yes,no,no,no")
   (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
)

;; When an ORR immediate is not encodable but its complement is, load the
;; constant into a scratch first and use a register-register ORR.
(define_peephole2
  [(match_scratch:SI 3 "r")
   (set (match_operand:SI 0 "arm_general_register_operand" "")
	(ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
		(match_operand:SI 2 "const_int_operand" "")))]
  "TARGET_ARM
   && !const_ok_for_arm (INTVAL (operands[2]))
   && const_ok_for_arm (~INTVAL (operands[2]))"
  [(set (match_dup 3) (match_dup 2))
   (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
  ""
)

(define_insn "*iorsi3_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
		 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
	 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
	(ior:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "orrs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "arch" "*,t2,*")
   (set_attr "length" "4,2,4")
   (set_attr "type" "logics_imm,logics_reg,logics_reg")]
)

;; ORRS for the flags only; the OR result goes to a scratch.
(define_insn "*iorsi3_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
		 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
	 (const_int 0)))
   (clobber (match_scratch:SI 0 "=r,l,r"))]
  "TARGET_32BIT"
  "orrs%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "arch" "*,t2,*")
   (set_attr "length" "4,2,4")
   (set_attr "type" "logics_imm,logics_reg,logics_reg")]
)

;; 32-bit exclusive-OR expander; mirrors the iorsi3 immediate handling.
(define_expand "xorsi3"
  [(set (match_operand:SI 0 "s_register_operand")
	(xor:SI (match_operand:SI 1 "s_register_operand")
		(match_operand:SI 2 "reg_or_int_operand")))]
  "TARGET_EITHER"
  "if (CONST_INT_P (operands[2]))
    {
      if (TARGET_32BIT)
	{
	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
	    operands[2] = force_reg (SImode, operands[2]);
	  else
	    {
	      arm_split_constant (XOR, SImode, NULL_RTX,
				  INTVAL (operands[2]), operands[0],
				  operands[1],
				  optimize && can_create_pseudo_p ());
	      DONE;
	    }
	}
      else /* TARGET_THUMB1 */
	{
	  rtx tmp = force_reg (SImode, operands[2]);
	  if (rtx_equal_p (operands[0], operands[1]))
	    operands[2] = tmp;
	  else
	    {
	      operands[2] = operands[1];
	      operands[1] = tmp;
	    }
	}
    }"
)

(define_insn_and_split "*arm_xorsi3"
  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
	(xor:SI (match_operand:SI 1
"s_register_operand" "%r,0,r,r")
		(match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
  "TARGET_32BIT"
  "@
   eor%?\\t%0, %1, %2
   eor%?\\t%0, %1, %2
   eor%?\\t%0, %1, %2
   #"
  "TARGET_32BIT
   && CONST_INT_P (operands[2])
   && !const_ok_for_arm (INTVAL (operands[2]))"
  [(clobber (const_int 0))]
{
  arm_split_constant (XOR, SImode, curr_insn,
		      INTVAL (operands[2]), operands[0], operands[1], 0);
  DONE;
}
  [(set_attr "length" "4,4,4,16")
   (set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no,yes,no,no")
   (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
)

;; EORS: flag-setting XOR whose result is kept.
(define_insn "*xorsi3_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
			       (match_operand:SI 2 "arm_rhs_operand" "I,r"))
		       (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(xor:SI (match_dup 1) (match_dup 2)))]
  "TARGET_32BIT"
  "eors%?\\t%0, %1, %2"
  [(set_attr "conds" "set")
   (set_attr "type" "logics_imm,logics_reg")]
)

;; TEQ: XOR for the flags only, no result register needed.
(define_insn "*xorsi3_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
			       (match_operand:SI 1 "arm_rhs_operand" "I,r"))
		       (const_int 0)))]
  "TARGET_32BIT"
  "teq%?\\t%0, %1"
  [(set_attr "conds" "set")
   (set_attr "type" "logics_imm,logics_reg")]
)

; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
; (NOT D) we can sometimes merge the final NOT into one of the following
; insns.

(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
			(not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
		(match_operand:SI 3 "arm_rhs_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_32BIT"
  [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
			      (not:SI (match_dup 3))))
   (set (match_dup 0) (not:SI (match_dup 4)))]
  ""
)

;; (A | B) & ~C as a two-insn ORR + BIC sequence, split after reload.
(define_insn_and_split "*andsi_iorsi3_notsi"
  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
	(and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
			(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
		(not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
  "TARGET_32BIT"
  "#"   ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
  "&& reload_completed"
  [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
  {
    /* If operands[3] is a constant make sure to fold the NOT into it
       to avoid creating a NOT of a CONST_INT.  */
    rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
    if (CONST_INT_P (not_rtx))
      {
	operands[4] = operands[0];
	operands[5] = not_rtx;
      }
    else
      {
	operands[5] = operands[0];
	operands[4] = not_rtx;
      }
  }
  [(set_attr "length" "8")
   (set_attr "ce_count" "2")
   (set_attr "predicable" "yes")
   (set_attr "type" "multiple")]
)

; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
; insns are available?
;; Combine a zero_extract with a logically-combined shifted operand into
;; a shift-left/shift-right pair when the field width complements the
;; existing right-shift amount.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(match_operator:SI 1 "logical_binary_operator"
	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
			   (match_operand:SI 3 "const_int_operand" "")
			   (match_operand:SI 4 "const_int_operand" ""))
	  (match_operator:SI 9 "logical_binary_operator"
	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
			 (match_operand:SI 6 "const_int_operand" ""))
	    (match_operand:SI 7 "s_register_operand" "")])]))
   (clobber (match_operand:SI 8 "s_register_operand" ""))]
  "TARGET_32BIT
   && GET_CODE (operands[1]) == GET_CODE (operands[9])
   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
  [(set (match_dup 8)
	(match_op_dup 1
	 [(ashift:SI (match_dup 2) (match_dup 4))
	  (match_dup 5)]))
   (set (match_dup 0)
	(match_op_dup 1
	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
	  (match_dup 7)]))]
  "
  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
")

;; Same as above with the zero_extract as the second operand of the
;; outer logical operator.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(match_operator:SI 1 "logical_binary_operator"
	 [(match_operator:SI 9 "logical_binary_operator"
	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
			 (match_operand:SI 6 "const_int_operand" ""))
	    (match_operand:SI 7 "s_register_operand" "")])
	  (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
			   (match_operand:SI 3 "const_int_operand" "")
			   (match_operand:SI 4 "const_int_operand" ""))]))
   (clobber (match_operand:SI 8 "s_register_operand" ""))]
  "TARGET_32BIT
   && GET_CODE (operands[1]) == GET_CODE (operands[9])
   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
  [(set (match_dup 8)
	(match_op_dup 1
	 [(ashift:SI (match_dup 2) (match_dup 4))
	  (match_dup 5)]))
   (set (match_dup 0)
	(match_op_dup 1
	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
	  (match_dup 7)]))]
  "
operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4]))); 3878") 3879 3880(define_split 3881 [(set (match_operand:SI 0 "s_register_operand" "") 3882 (match_operator:SI 1 "logical_binary_operator" 3883 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "") 3884 (match_operand:SI 3 "const_int_operand" "") 3885 (match_operand:SI 4 "const_int_operand" "")) 3886 (match_operator:SI 9 "logical_binary_operator" 3887 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "") 3888 (match_operand:SI 6 "const_int_operand" "")) 3889 (match_operand:SI 7 "s_register_operand" "")])])) 3890 (clobber (match_operand:SI 8 "s_register_operand" ""))] 3891 "TARGET_32BIT 3892 && GET_CODE (operands[1]) == GET_CODE (operands[9]) 3893 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])" 3894 [(set (match_dup 8) 3895 (match_op_dup 1 3896 [(ashift:SI (match_dup 2) (match_dup 4)) 3897 (match_dup 5)])) 3898 (set (match_dup 0) 3899 (match_op_dup 1 3900 [(ashiftrt:SI (match_dup 8) (match_dup 6)) 3901 (match_dup 7)]))] 3902 " 3903 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4]))); 3904") 3905 3906(define_split 3907 [(set (match_operand:SI 0 "s_register_operand" "") 3908 (match_operator:SI 1 "logical_binary_operator" 3909 [(match_operator:SI 9 "logical_binary_operator" 3910 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "") 3911 (match_operand:SI 6 "const_int_operand" "")) 3912 (match_operand:SI 7 "s_register_operand" "")]) 3913 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "") 3914 (match_operand:SI 3 "const_int_operand" "") 3915 (match_operand:SI 4 "const_int_operand" ""))])) 3916 (clobber (match_operand:SI 8 "s_register_operand" ""))] 3917 "TARGET_32BIT 3918 && GET_CODE (operands[1]) == GET_CODE (operands[9]) 3919 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])" 3920 [(set (match_dup 8) 3921 (match_op_dup 1 3922 [(ashift:SI (match_dup 2) (match_dup 4)) 3923 (match_dup 5)])) 3924 (set (match_dup 0) 3925 (match_op_dup 1 
3926 [(ashiftrt:SI (match_dup 8) (match_dup 6)) 3927 (match_dup 7)]))] 3928 " 3929 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4]))); 3930") 3931 3932 3933;; Minimum and maximum insns 3934 3935(define_expand "smaxsi3" 3936 [(parallel [ 3937 (set (match_operand:SI 0 "s_register_operand") 3938 (smax:SI (match_operand:SI 1 "s_register_operand") 3939 (match_operand:SI 2 "arm_rhs_operand"))) 3940 (clobber (reg:CC CC_REGNUM))])] 3941 "TARGET_32BIT" 3942 " 3943 if (operands[2] == const0_rtx || operands[2] == constm1_rtx) 3944 { 3945 /* No need for a clobber of the condition code register here. */ 3946 emit_insn (gen_rtx_SET (operands[0], 3947 gen_rtx_SMAX (SImode, operands[1], 3948 operands[2]))); 3949 DONE; 3950 } 3951") 3952 3953(define_insn "*smax_0" 3954 [(set (match_operand:SI 0 "s_register_operand" "=r") 3955 (smax:SI (match_operand:SI 1 "s_register_operand" "r") 3956 (const_int 0)))] 3957 "TARGET_32BIT" 3958 "bic%?\\t%0, %1, %1, asr #31" 3959 [(set_attr "predicable" "yes") 3960 (set_attr "type" "logic_shift_reg")] 3961) 3962 3963(define_insn "*smax_m1" 3964 [(set (match_operand:SI 0 "s_register_operand" "=r") 3965 (smax:SI (match_operand:SI 1 "s_register_operand" "r") 3966 (const_int -1)))] 3967 "TARGET_32BIT" 3968 "orr%?\\t%0, %1, %1, asr #31" 3969 [(set_attr "predicable" "yes") 3970 (set_attr "type" "logic_shift_reg")] 3971) 3972 3973(define_insn_and_split "*arm_smax_insn" 3974 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 3975 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r") 3976 (match_operand:SI 2 "arm_rhs_operand" "rI,rI"))) 3977 (clobber (reg:CC CC_REGNUM))] 3978 "TARGET_ARM" 3979 "#" 3980 ; cmp\\t%1, %2\;movlt\\t%0, %2 3981 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2" 3982 "TARGET_ARM" 3983 [(set (reg:CC CC_REGNUM) 3984 (compare:CC (match_dup 1) (match_dup 2))) 3985 (set (match_dup 0) 3986 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0)) 3987 (match_dup 1) 3988 (match_dup 2)))] 3989 "" 3990 [(set_attr 
"conds" "clob") 3991 (set_attr "length" "8,12") 3992 (set_attr "type" "multiple")] 3993) 3994 3995(define_expand "sminsi3" 3996 [(parallel [ 3997 (set (match_operand:SI 0 "s_register_operand") 3998 (smin:SI (match_operand:SI 1 "s_register_operand") 3999 (match_operand:SI 2 "arm_rhs_operand"))) 4000 (clobber (reg:CC CC_REGNUM))])] 4001 "TARGET_32BIT" 4002 " 4003 if (operands[2] == const0_rtx) 4004 { 4005 /* No need for a clobber of the condition code register here. */ 4006 emit_insn (gen_rtx_SET (operands[0], 4007 gen_rtx_SMIN (SImode, operands[1], 4008 operands[2]))); 4009 DONE; 4010 } 4011") 4012 4013(define_insn "*smin_0" 4014 [(set (match_operand:SI 0 "s_register_operand" "=r") 4015 (smin:SI (match_operand:SI 1 "s_register_operand" "r") 4016 (const_int 0)))] 4017 "TARGET_32BIT" 4018 "and%?\\t%0, %1, %1, asr #31" 4019 [(set_attr "predicable" "yes") 4020 (set_attr "type" "logic_shift_reg")] 4021) 4022 4023(define_insn_and_split "*arm_smin_insn" 4024 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 4025 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r") 4026 (match_operand:SI 2 "arm_rhs_operand" "rI,rI"))) 4027 (clobber (reg:CC CC_REGNUM))] 4028 "TARGET_ARM" 4029 "#" 4030 ; cmp\\t%1, %2\;movge\\t%0, %2 4031 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2" 4032 "TARGET_ARM" 4033 [(set (reg:CC CC_REGNUM) 4034 (compare:CC (match_dup 1) (match_dup 2))) 4035 (set (match_dup 0) 4036 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0)) 4037 (match_dup 1) 4038 (match_dup 2)))] 4039 "" 4040 [(set_attr "conds" "clob") 4041 (set_attr "length" "8,12") 4042 (set_attr "type" "multiple,multiple")] 4043) 4044 4045(define_expand "umaxsi3" 4046 [(parallel [ 4047 (set (match_operand:SI 0 "s_register_operand") 4048 (umax:SI (match_operand:SI 1 "s_register_operand") 4049 (match_operand:SI 2 "arm_rhs_operand"))) 4050 (clobber (reg:CC CC_REGNUM))])] 4051 "TARGET_32BIT" 4052 "" 4053) 4054 4055(define_insn_and_split "*arm_umaxsi3" 4056 [(set (match_operand:SI 0 
"s_register_operand" "=r,r,r") 4057 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r") 4058 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))) 4059 (clobber (reg:CC CC_REGNUM))] 4060 "TARGET_ARM" 4061 "#" 4062 ; cmp\\t%1, %2\;movcc\\t%0, %2 4063 ; cmp\\t%1, %2\;movcs\\t%0, %1 4064 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2" 4065 "TARGET_ARM" 4066 [(set (reg:CC CC_REGNUM) 4067 (compare:CC (match_dup 1) (match_dup 2))) 4068 (set (match_dup 0) 4069 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0)) 4070 (match_dup 1) 4071 (match_dup 2)))] 4072 "" 4073 [(set_attr "conds" "clob") 4074 (set_attr "length" "8,8,12") 4075 (set_attr "type" "store_4")] 4076) 4077 4078(define_expand "uminsi3" 4079 [(parallel [ 4080 (set (match_operand:SI 0 "s_register_operand") 4081 (umin:SI (match_operand:SI 1 "s_register_operand") 4082 (match_operand:SI 2 "arm_rhs_operand"))) 4083 (clobber (reg:CC CC_REGNUM))])] 4084 "TARGET_32BIT" 4085 "" 4086) 4087 4088(define_insn_and_split "*arm_uminsi3" 4089 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r") 4090 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r") 4091 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))) 4092 (clobber (reg:CC CC_REGNUM))] 4093 "TARGET_ARM" 4094 "#" 4095 ; cmp\\t%1, %2\;movcs\\t%0, %2 4096 ; cmp\\t%1, %2\;movcc\\t%0, %1 4097 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2" 4098 "TARGET_ARM" 4099 [(set (reg:CC CC_REGNUM) 4100 (compare:CC (match_dup 1) (match_dup 2))) 4101 (set (match_dup 0) 4102 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0)) 4103 (match_dup 1) 4104 (match_dup 2)))] 4105 "" 4106 [(set_attr "conds" "clob") 4107 (set_attr "length" "8,8,12") 4108 (set_attr "type" "store_4")] 4109) 4110 4111(define_insn "*store_minmaxsi" 4112 [(set (match_operand:SI 0 "memory_operand" "=m") 4113 (match_operator:SI 3 "minmax_operator" 4114 [(match_operand:SI 1 "s_register_operand" "r") 4115 (match_operand:SI 2 "s_register_operand" "r")])) 4116 (clobber (reg:CC CC_REGNUM))] 4117 
"TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it" 4118 "* 4119 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode, 4120 operands[1], operands[2]); 4121 output_asm_insn (\"cmp\\t%1, %2\", operands); 4122 if (TARGET_THUMB2) 4123 output_asm_insn (\"ite\t%d3\", operands); 4124 output_asm_insn (\"str%d3\\t%1, %0\", operands); 4125 output_asm_insn (\"str%D3\\t%2, %0\", operands); 4126 return \"\"; 4127 " 4128 [(set_attr "conds" "clob") 4129 (set (attr "length") 4130 (if_then_else (eq_attr "is_thumb" "yes") 4131 (const_int 14) 4132 (const_int 12))) 4133 (set_attr "type" "store_4")] 4134) 4135 4136; Reject the frame pointer in operand[1], since reloading this after 4137; it has been eliminated can cause carnage. 4138(define_insn "*minmax_arithsi" 4139 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 4140 (match_operator:SI 4 "shiftable_operator" 4141 [(match_operator:SI 5 "minmax_operator" 4142 [(match_operand:SI 2 "s_register_operand" "r,r") 4143 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")]) 4144 (match_operand:SI 1 "s_register_operand" "0,?r")])) 4145 (clobber (reg:CC CC_REGNUM))] 4146 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it" 4147 "* 4148 { 4149 enum rtx_code code = GET_CODE (operands[4]); 4150 bool need_else; 4151 4152 if (which_alternative != 0 || operands[3] != const0_rtx 4153 || (code != PLUS && code != IOR && code != XOR)) 4154 need_else = true; 4155 else 4156 need_else = false; 4157 4158 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode, 4159 operands[2], operands[3]); 4160 output_asm_insn (\"cmp\\t%2, %3\", operands); 4161 if (TARGET_THUMB2) 4162 { 4163 if (need_else) 4164 output_asm_insn (\"ite\\t%d5\", operands); 4165 else 4166 output_asm_insn (\"it\\t%d5\", operands); 4167 } 4168 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands); 4169 if (need_else) 4170 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands); 4171 return \"\"; 4172 }" 4173 [(set_attr "conds" "clob") 
4174 (set (attr "length") 4175 (if_then_else (eq_attr "is_thumb" "yes") 4176 (const_int 14) 4177 (const_int 12))) 4178 (set_attr "type" "multiple")] 4179) 4180 4181; Reject the frame pointer in operand[1], since reloading this after 4182; it has been eliminated can cause carnage. 4183(define_insn_and_split "*minmax_arithsi_non_canon" 4184 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts") 4185 (minus:SI 4186 (match_operand:SI 1 "s_register_operand" "0,?Ts") 4187 (match_operator:SI 4 "minmax_operator" 4188 [(match_operand:SI 2 "s_register_operand" "Ts,Ts") 4189 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")]))) 4190 (clobber (reg:CC CC_REGNUM))] 4191 "TARGET_32BIT && !arm_eliminable_register (operands[1]) 4192 && !(arm_restrict_it && CONST_INT_P (operands[3]))" 4193 "#" 4194 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed" 4195 [(set (reg:CC CC_REGNUM) 4196 (compare:CC (match_dup 2) (match_dup 3))) 4197 4198 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)]) 4199 (set (match_dup 0) 4200 (minus:SI (match_dup 1) 4201 (match_dup 2)))) 4202 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)]) 4203 (set (match_dup 0) 4204 (match_dup 6)))] 4205 { 4206 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]), 4207 operands[2], operands[3]); 4208 enum rtx_code rc = minmax_code (operands[4]); 4209 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, 4210 operands[2], operands[3]); 4211 4212 if (mode == CCFPmode || mode == CCFPEmode) 4213 rc = reverse_condition_maybe_unordered (rc); 4214 else 4215 rc = reverse_condition (rc); 4216 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]); 4217 if (CONST_INT_P (operands[3])) 4218 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3])); 4219 else 4220 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]); 4221 } 4222 [(set_attr "conds" "clob") 4223 (set (attr "length") 4224 (if_then_else (eq_attr "is_thumb" "yes") 4225 (const_int 14) 4226 
(const_int 12))) 4227 (set_attr "type" "multiple")] 4228) 4229 4230 4231(define_expand "arm_<ss_op>" 4232 [(set (match_operand:SI 0 "s_register_operand") 4233 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand") 4234 (match_operand:SI 2 "s_register_operand")))] 4235 "TARGET_DSP_MULTIPLY" 4236 { 4237 if (ARM_Q_BIT_READ) 4238 emit_insn (gen_arm_<ss_op>_setq_insn (operands[0], 4239 operands[1], operands[2])); 4240 else 4241 emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2])); 4242 DONE; 4243 } 4244) 4245 4246(define_insn "arm_<ss_op><add_clobber_q_name>_insn" 4247 [(set (match_operand:SI 0 "s_register_operand" "=r") 4248 (SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r") 4249 (match_operand:SI 2 "s_register_operand" "r")))] 4250 "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>" 4251 "<ss_op>%?\t%0, %1, %2" 4252 [(set_attr "predicable" "yes") 4253 (set_attr "type" "alu_dsp_reg")] 4254) 4255 4256(define_code_iterator SAT [smin smax]) 4257(define_code_attr SATrev [(smin "smax") (smax "smin")]) 4258(define_code_attr SATlo [(smin "1") (smax "2")]) 4259(define_code_attr SAThi [(smin "2") (smax "1")]) 4260 4261(define_expand "arm_ssat" 4262 [(match_operand:SI 0 "s_register_operand") 4263 (match_operand:SI 1 "s_register_operand") 4264 (match_operand:SI 2 "const_int_operand")] 4265 "TARGET_32BIT && arm_arch6" 4266 { 4267 HOST_WIDE_INT val = INTVAL (operands[2]); 4268 /* The builtin checking code should have ensured the right 4269 range for the immediate. 
*/ 4270 gcc_assert (IN_RANGE (val, 1, 32)); 4271 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1; 4272 HOST_WIDE_INT lower_bound = -upper_bound - 1; 4273 rtx up_rtx = gen_int_mode (upper_bound, SImode); 4274 rtx lo_rtx = gen_int_mode (lower_bound, SImode); 4275 if (ARM_Q_BIT_READ) 4276 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, 4277 up_rtx, operands[1])); 4278 else 4279 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1])); 4280 DONE; 4281 } 4282) 4283 4284(define_expand "arm_usat" 4285 [(match_operand:SI 0 "s_register_operand") 4286 (match_operand:SI 1 "s_register_operand") 4287 (match_operand:SI 2 "const_int_operand")] 4288 "TARGET_32BIT && arm_arch6" 4289 { 4290 HOST_WIDE_INT val = INTVAL (operands[2]); 4291 /* The builtin checking code should have ensured the right 4292 range for the immediate. */ 4293 gcc_assert (IN_RANGE (val, 0, 31)); 4294 HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1; 4295 rtx up_rtx = gen_int_mode (upper_bound, SImode); 4296 rtx lo_rtx = CONST0_RTX (SImode); 4297 if (ARM_Q_BIT_READ) 4298 emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx, 4299 operands[1])); 4300 else 4301 emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1])); 4302 DONE; 4303 } 4304) 4305 4306(define_insn "arm_get_apsr" 4307 [(set (match_operand:SI 0 "s_register_operand" "=r") 4308 (unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))] 4309 "TARGET_ARM_QBIT" 4310 "mrs%?\t%0, APSR" 4311 [(set_attr "predicable" "yes") 4312 (set_attr "conds" "use")] 4313) 4314 4315(define_insn "arm_set_apsr" 4316 [(set (reg:CC APSRQ_REGNUM) 4317 (unspec_volatile:CC 4318 [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))] 4319 "TARGET_ARM_QBIT" 4320 "msr%?\tAPSR_nzcvq, %0" 4321 [(set_attr "predicable" "yes") 4322 (set_attr "conds" "set")] 4323) 4324 4325;; Read the APSR and extract the Q bit (bit 27) 4326(define_expand "arm_saturation_occurred" 4327 [(match_operand:SI 0 "s_register_operand")] 4328 
"TARGET_ARM_QBIT" 4329 { 4330 rtx apsr = gen_reg_rtx (SImode); 4331 emit_insn (gen_arm_get_apsr (apsr)); 4332 emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode), 4333 gen_int_mode (27, SImode))); 4334 DONE; 4335 } 4336) 4337 4338;; Read the APSR and set the Q bit (bit position 27) according to operand 0 4339(define_expand "arm_set_saturation" 4340 [(match_operand:SI 0 "reg_or_int_operand")] 4341 "TARGET_ARM_QBIT" 4342 { 4343 rtx apsr = gen_reg_rtx (SImode); 4344 emit_insn (gen_arm_get_apsr (apsr)); 4345 rtx to_insert = gen_reg_rtx (SImode); 4346 if (CONST_INT_P (operands[0])) 4347 emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode) 4348 ? CONST0_RTX (SImode) : CONST1_RTX (SImode)); 4349 else 4350 { 4351 rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode)); 4352 emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0], 4353 CONST0_RTX (SImode))); 4354 } 4355 emit_insn (gen_insv (apsr, CONST1_RTX (SImode), 4356 gen_int_mode (27, SImode), to_insert)); 4357 emit_insn (gen_arm_set_apsr (apsr)); 4358 DONE; 4359 } 4360) 4361 4362(define_insn "satsi_<SAT:code><add_clobber_q_name>" 4363 [(set (match_operand:SI 0 "s_register_operand" "=r") 4364 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r") 4365 (match_operand:SI 1 "const_int_operand" "i")) 4366 (match_operand:SI 2 "const_int_operand" "i")))] 4367 "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred> 4368 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)" 4369{ 4370 int mask; 4371 bool signed_sat; 4372 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], 4373 &mask, &signed_sat)) 4374 gcc_unreachable (); 4375 4376 operands[1] = GEN_INT (mask); 4377 if (signed_sat) 4378 return "ssat%?\t%0, %1, %3"; 4379 else 4380 return "usat%?\t%0, %1, %3"; 4381} 4382 [(set_attr "predicable" "yes") 4383 (set_attr "type" "alus_imm")] 4384) 4385 4386(define_insn "*satsi_<SAT:code>_shift" 4387 [(set (match_operand:SI 0 "s_register_operand" 
"=r") 4388 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator" 4389 [(match_operand:SI 4 "s_register_operand" "r") 4390 (match_operand:SI 5 "const_int_operand" "i")]) 4391 (match_operand:SI 1 "const_int_operand" "i")) 4392 (match_operand:SI 2 "const_int_operand" "i")))] 4393 "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ 4394 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)" 4395{ 4396 int mask; 4397 bool signed_sat; 4398 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], 4399 &mask, &signed_sat)) 4400 gcc_unreachable (); 4401 4402 operands[1] = GEN_INT (mask); 4403 if (signed_sat) 4404 return "ssat%?\t%0, %1, %4%S3"; 4405 else 4406 return "usat%?\t%0, %1, %4%S3"; 4407} 4408 [(set_attr "predicable" "yes") 4409 (set_attr "shift" "3") 4410 (set_attr "type" "logic_shift_reg")]) 4411 4412;; Custom Datapath Extension insns. 4413(define_insn "arm_cx1<mode>" 4414 [(set (match_operand:SIDI 0 "s_register_operand" "=r") 4415 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i") 4416 (match_operand:SI 2 "const_int_ccde1_operand" "i")] 4417 UNSPEC_CDE))] 4418 "TARGET_CDE" 4419 "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2" 4420 [(set_attr "type" "coproc")] 4421) 4422 4423(define_insn "arm_cx1a<mode>" 4424 [(set (match_operand:SIDI 0 "s_register_operand" "=r") 4425 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i") 4426 (match_operand:SIDI 2 "s_register_operand" "0") 4427 (match_operand:SI 3 "const_int_ccde1_operand" "i")] 4428 UNSPEC_CDEA))] 4429 "TARGET_CDE" 4430 "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3" 4431 [(set_attr "type" "coproc")] 4432) 4433 4434(define_insn "arm_cx2<mode>" 4435 [(set (match_operand:SIDI 0 "s_register_operand" "=r") 4436 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i") 4437 (match_operand:SI 2 "s_register_operand" "r") 4438 (match_operand:SI 3 "const_int_ccde2_operand" "i")] 4439 UNSPEC_CDE))] 4440 "TARGET_CDE" 4441 "cx2<cde_suffix>\\tp%c1, 
<cde_dest>, %2, %3" 4442 [(set_attr "type" "coproc")] 4443) 4444 4445(define_insn "arm_cx2a<mode>" 4446 [(set (match_operand:SIDI 0 "s_register_operand" "=r") 4447 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i") 4448 (match_operand:SIDI 2 "s_register_operand" "0") 4449 (match_operand:SI 3 "s_register_operand" "r") 4450 (match_operand:SI 4 "const_int_ccde2_operand" "i")] 4451 UNSPEC_CDEA))] 4452 "TARGET_CDE" 4453 "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4" 4454 [(set_attr "type" "coproc")] 4455) 4456 4457(define_insn "arm_cx3<mode>" 4458 [(set (match_operand:SIDI 0 "s_register_operand" "=r") 4459 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i") 4460 (match_operand:SI 2 "s_register_operand" "r") 4461 (match_operand:SI 3 "s_register_operand" "r") 4462 (match_operand:SI 4 "const_int_ccde3_operand" "i")] 4463 UNSPEC_CDE))] 4464 "TARGET_CDE" 4465 "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4" 4466 [(set_attr "type" "coproc")] 4467) 4468 4469(define_insn "arm_cx3a<mode>" 4470 [(set (match_operand:SIDI 0 "s_register_operand" "=r") 4471 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i") 4472 (match_operand:SIDI 2 "s_register_operand" "0") 4473 (match_operand:SI 3 "s_register_operand" "r") 4474 (match_operand:SI 4 "s_register_operand" "r") 4475 (match_operand:SI 5 "const_int_ccde3_operand" "i")] 4476 UNSPEC_CDEA))] 4477 "TARGET_CDE" 4478 "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5" 4479 [(set_attr "type" "coproc")] 4480) 4481 4482;; Shift and rotation insns 4483 4484(define_expand "ashldi3" 4485 [(set (match_operand:DI 0 "s_register_operand") 4486 (ashift:DI (match_operand:DI 1 "s_register_operand") 4487 (match_operand:SI 2 "reg_or_int_operand")))] 4488 "TARGET_32BIT" 4489 " 4490 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN) 4491 { 4492 if (!reg_or_int_operand (operands[2], SImode)) 4493 operands[2] = force_reg (SImode, operands[2]); 4494 4495 /* Armv8.1-M Mainline double shifts are not expanded. 
*/ 4496 if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])) 4497 && (REG_P (operands[2]) || INTVAL(operands[2]) != 32)) 4498 { 4499 if (!reg_overlap_mentioned_p(operands[0], operands[1])) 4500 emit_insn (gen_movdi (operands[0], operands[1])); 4501 4502 emit_insn (gen_thumb2_lsll (operands[0], operands[2])); 4503 DONE; 4504 } 4505 } 4506 4507 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1], 4508 operands[2], gen_reg_rtx (SImode), 4509 gen_reg_rtx (SImode)); 4510 DONE; 4511") 4512 4513(define_expand "ashlsi3" 4514 [(set (match_operand:SI 0 "s_register_operand") 4515 (ashift:SI (match_operand:SI 1 "s_register_operand") 4516 (match_operand:SI 2 "arm_rhs_operand")))] 4517 "TARGET_EITHER" 4518 " 4519 if (CONST_INT_P (operands[2]) 4520 && (UINTVAL (operands[2])) > 31) 4521 { 4522 emit_insn (gen_movsi (operands[0], const0_rtx)); 4523 DONE; 4524 } 4525 " 4526) 4527 4528(define_expand "ashrdi3" 4529 [(set (match_operand:DI 0 "s_register_operand") 4530 (ashiftrt:DI (match_operand:DI 1 "s_register_operand") 4531 (match_operand:SI 2 "reg_or_int_operand")))] 4532 "TARGET_32BIT" 4533 " 4534 /* Armv8.1-M Mainline double shifts are not expanded. 
*/ 4535 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN 4536 && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))) 4537 { 4538 if (!reg_overlap_mentioned_p(operands[0], operands[1])) 4539 emit_insn (gen_movdi (operands[0], operands[1])); 4540 4541 emit_insn (gen_thumb2_asrl (operands[0], operands[2])); 4542 DONE; 4543 } 4544 4545 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1], 4546 operands[2], gen_reg_rtx (SImode), 4547 gen_reg_rtx (SImode)); 4548 DONE; 4549") 4550 4551(define_expand "ashrsi3" 4552 [(set (match_operand:SI 0 "s_register_operand") 4553 (ashiftrt:SI (match_operand:SI 1 "s_register_operand") 4554 (match_operand:SI 2 "arm_rhs_operand")))] 4555 "TARGET_EITHER" 4556 " 4557 if (CONST_INT_P (operands[2]) 4558 && UINTVAL (operands[2]) > 31) 4559 operands[2] = GEN_INT (31); 4560 " 4561) 4562 4563(define_expand "lshrdi3" 4564 [(set (match_operand:DI 0 "s_register_operand") 4565 (lshiftrt:DI (match_operand:DI 1 "s_register_operand") 4566 (match_operand:SI 2 "reg_or_int_operand")))] 4567 "TARGET_32BIT" 4568 " 4569 /* Armv8.1-M Mainline double shifts are not expanded. 
*/ 4570 if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN 4571 && long_shift_imm (operands[2], GET_MODE (operands[2]))) 4572 { 4573 if (!reg_overlap_mentioned_p(operands[0], operands[1])) 4574 emit_insn (gen_movdi (operands[0], operands[1])); 4575 4576 emit_insn (gen_thumb2_lsrl (operands[0], operands[2])); 4577 DONE; 4578 } 4579 4580 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1], 4581 operands[2], gen_reg_rtx (SImode), 4582 gen_reg_rtx (SImode)); 4583 DONE; 4584") 4585 4586(define_expand "lshrsi3" 4587 [(set (match_operand:SI 0 "s_register_operand") 4588 (lshiftrt:SI (match_operand:SI 1 "s_register_operand") 4589 (match_operand:SI 2 "arm_rhs_operand")))] 4590 "TARGET_EITHER" 4591 " 4592 if (CONST_INT_P (operands[2]) 4593 && (UINTVAL (operands[2])) > 31) 4594 { 4595 emit_insn (gen_movsi (operands[0], const0_rtx)); 4596 DONE; 4597 } 4598 " 4599) 4600 4601(define_expand "rotlsi3" 4602 [(set (match_operand:SI 0 "s_register_operand") 4603 (rotatert:SI (match_operand:SI 1 "s_register_operand") 4604 (match_operand:SI 2 "reg_or_int_operand")))] 4605 "TARGET_32BIT" 4606 " 4607 if (CONST_INT_P (operands[2])) 4608 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32); 4609 else 4610 { 4611 rtx reg = gen_reg_rtx (SImode); 4612 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2])); 4613 operands[2] = reg; 4614 } 4615 " 4616) 4617 4618(define_expand "rotrsi3" 4619 [(set (match_operand:SI 0 "s_register_operand") 4620 (rotatert:SI (match_operand:SI 1 "s_register_operand") 4621 (match_operand:SI 2 "arm_rhs_operand")))] 4622 "TARGET_EITHER" 4623 " 4624 if (TARGET_32BIT) 4625 { 4626 if (CONST_INT_P (operands[2]) 4627 && UINTVAL (operands[2]) > 31) 4628 operands[2] = GEN_INT (INTVAL (operands[2]) % 32); 4629 } 4630 else /* TARGET_THUMB1 */ 4631 { 4632 if (CONST_INT_P (operands [2])) 4633 operands [2] = force_reg (SImode, operands[2]); 4634 } 4635 " 4636) 4637 4638(define_insn "*arm_shiftsi3" 4639 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r") 4640 
(match_operator:SI 3 "shift_operator" 4641 [(match_operand:SI 1 "s_register_operand" "0,l,r,r") 4642 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))] 4643 "TARGET_32BIT" 4644 "* return arm_output_shift(operands, 0);" 4645 [(set_attr "predicable" "yes") 4646 (set_attr "arch" "t2,t2,*,*") 4647 (set_attr "predicable_short_it" "yes,yes,no,no") 4648 (set_attr "length" "4") 4649 (set_attr "shift" "1") 4650 (set_attr "autodetect_type" "alu_shift_operator3")] 4651) 4652 4653(define_insn "*shiftsi3_compare0" 4654 [(set (reg:CC_NZ CC_REGNUM) 4655 (compare:CC_NZ (match_operator:SI 3 "shift_operator" 4656 [(match_operand:SI 1 "s_register_operand" "r,r") 4657 (match_operand:SI 2 "arm_rhs_operand" "M,r")]) 4658 (const_int 0))) 4659 (set (match_operand:SI 0 "s_register_operand" "=r,r") 4660 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))] 4661 "TARGET_32BIT" 4662 "* return arm_output_shift(operands, 1);" 4663 [(set_attr "conds" "set") 4664 (set_attr "shift" "1") 4665 (set_attr "type" "alus_shift_imm,alus_shift_reg")] 4666) 4667 4668(define_insn "*shiftsi3_compare0_scratch" 4669 [(set (reg:CC_NZ CC_REGNUM) 4670 (compare:CC_NZ (match_operator:SI 3 "shift_operator" 4671 [(match_operand:SI 1 "s_register_operand" "r,r") 4672 (match_operand:SI 2 "arm_rhs_operand" "M,r")]) 4673 (const_int 0))) 4674 (clobber (match_scratch:SI 0 "=r,r"))] 4675 "TARGET_32BIT" 4676 "* return arm_output_shift(operands, 1);" 4677 [(set_attr "conds" "set") 4678 (set_attr "shift" "1") 4679 (set_attr "type" "shift_imm,shift_reg")] 4680) 4681 4682(define_insn "*not_shiftsi" 4683 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 4684 (not:SI (match_operator:SI 3 "shift_operator" 4685 [(match_operand:SI 1 "s_register_operand" "r,r") 4686 (match_operand:SI 2 "shift_amount_operand" "M,r")])))] 4687 "TARGET_32BIT" 4688 "mvn%?\\t%0, %1%S3" 4689 [(set_attr "predicable" "yes") 4690 (set_attr "shift" "1") 4691 (set_attr "arch" "32,a") 4692 (set_attr "type" "mvn_shift,mvn_shift_reg")]) 4693 4694(define_insn 
"*not_shiftsi_compare0" 4695 [(set (reg:CC_NZ CC_REGNUM) 4696 (compare:CC_NZ 4697 (not:SI (match_operator:SI 3 "shift_operator" 4698 [(match_operand:SI 1 "s_register_operand" "r,r") 4699 (match_operand:SI 2 "shift_amount_operand" "M,r")])) 4700 (const_int 0))) 4701 (set (match_operand:SI 0 "s_register_operand" "=r,r") 4702 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))] 4703 "TARGET_32BIT" 4704 "mvns%?\\t%0, %1%S3" 4705 [(set_attr "conds" "set") 4706 (set_attr "shift" "1") 4707 (set_attr "arch" "32,a") 4708 (set_attr "type" "mvn_shift,mvn_shift_reg")]) 4709 4710(define_insn "*not_shiftsi_compare0_scratch" 4711 [(set (reg:CC_NZ CC_REGNUM) 4712 (compare:CC_NZ 4713 (not:SI (match_operator:SI 3 "shift_operator" 4714 [(match_operand:SI 1 "s_register_operand" "r,r") 4715 (match_operand:SI 2 "shift_amount_operand" "M,r")])) 4716 (const_int 0))) 4717 (clobber (match_scratch:SI 0 "=r,r"))] 4718 "TARGET_32BIT" 4719 "mvns%?\\t%0, %1%S3" 4720 [(set_attr "conds" "set") 4721 (set_attr "shift" "1") 4722 (set_attr "arch" "32,a") 4723 (set_attr "type" "mvn_shift,mvn_shift_reg")]) 4724 4725;; We don't really have extzv, but defining this using shifts helps 4726;; to reduce register pressure later on. 
4727 4728(define_expand "extzv" 4729 [(set (match_operand 0 "s_register_operand") 4730 (zero_extract (match_operand 1 "nonimmediate_operand") 4731 (match_operand 2 "const_int_operand") 4732 (match_operand 3 "const_int_operand")))] 4733 "TARGET_THUMB1 || arm_arch_thumb2" 4734 " 4735 { 4736 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]); 4737 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]); 4738 4739 if (arm_arch_thumb2) 4740 { 4741 HOST_WIDE_INT width = INTVAL (operands[2]); 4742 HOST_WIDE_INT bitpos = INTVAL (operands[3]); 4743 4744 if (unaligned_access && MEM_P (operands[1]) 4745 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0) 4746 { 4747 rtx base_addr; 4748 4749 if (BYTES_BIG_ENDIAN) 4750 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width 4751 - bitpos; 4752 4753 if (width == 32) 4754 { 4755 base_addr = adjust_address (operands[1], SImode, 4756 bitpos / BITS_PER_UNIT); 4757 emit_insn (gen_unaligned_loadsi (operands[0], base_addr)); 4758 } 4759 else 4760 { 4761 rtx dest = operands[0]; 4762 rtx tmp = gen_reg_rtx (SImode); 4763 4764 /* We may get a paradoxical subreg here. Strip it off. 
*/ 4765 if (GET_CODE (dest) == SUBREG 4766 && GET_MODE (dest) == SImode 4767 && GET_MODE (SUBREG_REG (dest)) == HImode) 4768 dest = SUBREG_REG (dest); 4769 4770 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width) 4771 FAIL; 4772 4773 base_addr = adjust_address (operands[1], HImode, 4774 bitpos / BITS_PER_UNIT); 4775 emit_insn (gen_unaligned_loadhiu (tmp, base_addr)); 4776 emit_move_insn (gen_lowpart (SImode, dest), tmp); 4777 } 4778 DONE; 4779 } 4780 else if (s_register_operand (operands[1], GET_MODE (operands[1]))) 4781 { 4782 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2], 4783 operands[3])); 4784 DONE; 4785 } 4786 else 4787 FAIL; 4788 } 4789 4790 if (!s_register_operand (operands[1], GET_MODE (operands[1]))) 4791 FAIL; 4792 4793 operands[3] = GEN_INT (rshift); 4794 4795 if (lshift == 0) 4796 { 4797 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3])); 4798 DONE; 4799 } 4800 4801 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift), 4802 operands[3], gen_reg_rtx (SImode))); 4803 DONE; 4804 }" 4805) 4806 4807;; Helper for extzv, for the Thumb-1 register-shifts case. 
;; extzv_t1: Thumb-1 zero_extract helper.  Implements the extract as an
;; ashift of operand 1 into the scratch operand 4 followed by an
;; lshiftrt into operand 0; operands 2 and 3 are the two shift counts.
;;
;; extv: Thumb-2 signed bitfield extraction expander.  Byte-aligned
;; 16/32-bit extractions from memory are done with the unaligned load
;; patterns (32-bit directly into the destination, 16-bit via an SImode
;; temporary and a lowpart move, stripping a paradoxical SImode-of-HImode
;; subreg from the destination first and FAILing if the destination
;; width does not match).  SImode register sources dispatch to
;; extv_regsi (which matches the sbfx insn); everything else FAILs back
;; to the generic bitfield expansion code.
4808 4809(define_expand "extzv_t1" 4810 [(set (match_operand:SI 4 "s_register_operand") 4811 (ashift:SI (match_operand:SI 1 "nonimmediate_operand") 4812 (match_operand:SI 2 "const_int_operand"))) 4813 (set (match_operand:SI 0 "s_register_operand") 4814 (lshiftrt:SI (match_dup 4) 4815 (match_operand:SI 3 "const_int_operand")))] 4816 "TARGET_THUMB1" 4817 "") 4818 4819(define_expand "extv" 4820 [(set (match_operand 0 "s_register_operand") 4821 (sign_extract (match_operand 1 "nonimmediate_operand") 4822 (match_operand 2 "const_int_operand") 4823 (match_operand 3 "const_int_operand")))] 4824 "arm_arch_thumb2" 4825{ 4826 HOST_WIDE_INT width = INTVAL (operands[2]); 4827 HOST_WIDE_INT bitpos = INTVAL (operands[3]); 4828 4829 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32) 4830 && (bitpos % BITS_PER_UNIT) == 0) 4831 { 4832 rtx base_addr; 4833 4834 if (BYTES_BIG_ENDIAN) 4835 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos; 4836 4837 if (width == 32) 4838 { 4839 base_addr = adjust_address (operands[1], SImode, 4840 bitpos / BITS_PER_UNIT); 4841 emit_insn (gen_unaligned_loadsi (operands[0], base_addr)); 4842 } 4843 else 4844 { 4845 rtx dest = operands[0]; 4846 rtx tmp = gen_reg_rtx (SImode); 4847 4848 /* We may get a paradoxical subreg here. Strip it off. 
*/ 4849 if (GET_CODE (dest) == SUBREG 4850 && GET_MODE (dest) == SImode 4851 && GET_MODE (SUBREG_REG (dest)) == HImode) 4852 dest = SUBREG_REG (dest); 4853 4854 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width) 4855 FAIL; 4856 4857 base_addr = adjust_address (operands[1], HImode, 4858 bitpos / BITS_PER_UNIT); 4859 emit_insn (gen_unaligned_loadhis (tmp, base_addr)); 4860 emit_move_insn (gen_lowpart (SImode, dest), tmp); 4861 } 4862 4863 DONE; 4864 } 4865 else if (!s_register_operand (operands[1], GET_MODE (operands[1]))) 4866 FAIL; 4867 else if (GET_MODE (operands[0]) == SImode 4868 && GET_MODE (operands[1]) == SImode) 4869 { 4870 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2], 4871 operands[3])); 4872 DONE; 4873 } 4874 4875 FAIL; 4876}) 4877 4878; Helper to expand register forms of extv with the proper modes. 4879 4880(define_expand "extv_regsi" 4881 [(set (match_operand:SI 0 "s_register_operand") 4882 (sign_extract:SI (match_operand:SI 1 "s_register_operand") 4883 (match_operand 2 "const_int_operand") 4884 (match_operand 3 "const_int_operand")))] 4885 "" 4886{ 4887}) 4888 4889; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; unaligned_loaddi / unaligned_loadsi: DImode and SImode loads wrapped
;; in UNSPEC_UNALIGNED_LOAD.  The DI form requires TARGET_LDRD and emits
;; its assembly through output_move_double; the SI form has three
;; alternatives — Thumb-1, Thumb-2 and 32-bit ARM (see the "arch"
;; attribute) — of which only the Thumb-2 and ARM ones are predicable,
;; and only the Thumb-2 one is usable in a short IT block.
4890 4891(define_insn "unaligned_loaddi" 4892 [(set (match_operand:DI 0 "s_register_operand" "=r") 4893 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")] 4894 UNSPEC_UNALIGNED_LOAD))] 4895 "TARGET_32BIT && TARGET_LDRD" 4896 "* 4897 return output_move_double (operands, true, NULL); 4898 " 4899 [(set_attr "length" "8") 4900 (set_attr "type" "load_8")]) 4901 4902(define_insn "unaligned_loadsi" 4903 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r") 4904 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")] 4905 UNSPEC_UNALIGNED_LOAD))] 4906 "unaligned_access" 4907 "@ 4908 ldr\t%0, %1\t@ unaligned 4909 ldr%?\t%0, %1\t@ unaligned 4910 ldr%?\t%0, %1\t@ unaligned" 4911 [(set_attr "arch" "t1,t2,32") 4912 (set_attr "length" "2,2,4") 4913 (set_attr "predicable" "no,yes,yes") 4914 (set_attr "predicable_short_it" "no,yes,no") 4915 (set_attr "type" "load_4")]) 4916 4917;; The 16-bit Thumb1 variant of ldrsh requires two registers in the 4918;; address (there's no immediate format). That's tricky to support 4919;; here and we don't really need this pattern for that case, so only 4920;; enable for 32-bit ISAs.
;; The remaining unaligned-access patterns: sign- and zero-extending
;; halfword loads (ldrsh/ldrh) and the DI/SI/HI stores, all wrapped in
;; UNSPEC_UNALIGNED_LOAD/UNSPEC_UNALIGNED_STORE and guarded by
;; unaligned_access.  They are followed by the Thumb-2 sbfx/ubfx
;; bitfield extraction insns (*extv_reg and extzv_t2, which IN_RANGE
;; check that position is 0..31 and width fits within 32 bits), the
;; TARGET_IDIV sdiv/udiv division insns (32-bit and v8-M Baseline
;; alternatives), and the first unary-arithmetic expanders: negv<mode>3
;; (implemented via subv<mode>4 from zero), negsi2, and the rsb-based
;; *arm_negsi2 insn.
4921(define_insn "unaligned_loadhis" 4922 [(set (match_operand:SI 0 "s_register_operand" "=r") 4923 (sign_extend:SI 4924 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")] 4925 UNSPEC_UNALIGNED_LOAD)))] 4926 "unaligned_access && TARGET_32BIT" 4927 "ldrsh%?\t%0, %1\t@ unaligned" 4928 [(set_attr "predicable" "yes") 4929 (set_attr "type" "load_byte")]) 4930 4931(define_insn "unaligned_loadhiu" 4932 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r") 4933 (zero_extend:SI 4934 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")] 4935 UNSPEC_UNALIGNED_LOAD)))] 4936 "unaligned_access" 4937 "@ 4938 ldrh\t%0, %1\t@ unaligned 4939 ldrh%?\t%0, %1\t@ unaligned 4940 ldrh%?\t%0, %1\t@ unaligned" 4941 [(set_attr "arch" "t1,t2,32") 4942 (set_attr "length" "2,2,4") 4943 (set_attr "predicable" "no,yes,yes") 4944 (set_attr "predicable_short_it" "no,yes,no") 4945 (set_attr "type" "load_byte")]) 4946 4947(define_insn "unaligned_storedi" 4948 [(set (match_operand:DI 0 "memory_operand" "=m") 4949 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")] 4950 UNSPEC_UNALIGNED_STORE))] 4951 "TARGET_32BIT && TARGET_LDRD" 4952 "* 4953 return output_move_double (operands, true, NULL); 4954 " 4955 [(set_attr "length" "8") 4956 (set_attr "type" "store_8")]) 4957 4958(define_insn "unaligned_storesi" 4959 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m") 4960 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")] 4961 UNSPEC_UNALIGNED_STORE))] 4962 "unaligned_access" 4963 "@ 4964 str\t%1, %0\t@ unaligned 4965 str%?\t%1, %0\t@ unaligned 4966 str%?\t%1, %0\t@ unaligned" 4967 [(set_attr "arch" "t1,t2,32") 4968 (set_attr "length" "2,2,4") 4969 (set_attr "predicable" "no,yes,yes") 4970 (set_attr "predicable_short_it" "no,yes,no") 4971 (set_attr "type" "store_4")]) 4972 4973(define_insn "unaligned_storehi" 4974 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m") 4975 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")] 4976 UNSPEC_UNALIGNED_STORE))] 4977 
"unaligned_access" 4978 "@ 4979 strh\t%1, %0\t@ unaligned 4980 strh%?\t%1, %0\t@ unaligned 4981 strh%?\t%1, %0\t@ unaligned" 4982 [(set_attr "arch" "t1,t2,32") 4983 (set_attr "length" "2,2,4") 4984 (set_attr "predicable" "no,yes,yes") 4985 (set_attr "predicable_short_it" "no,yes,no") 4986 (set_attr "type" "store_4")]) 4987 4988 4989(define_insn "*extv_reg" 4990 [(set (match_operand:SI 0 "s_register_operand" "=r") 4991 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r") 4992 (match_operand:SI 2 "const_int_operand" "n") 4993 (match_operand:SI 3 "const_int_operand" "n")))] 4994 "arm_arch_thumb2 4995 && IN_RANGE (INTVAL (operands[3]), 0, 31) 4996 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))" 4997 "sbfx%?\t%0, %1, %3, %2" 4998 [(set_attr "length" "4") 4999 (set_attr "predicable" "yes") 5000 (set_attr "type" "bfm")] 5001) 5002 5003(define_insn "extzv_t2" 5004 [(set (match_operand:SI 0 "s_register_operand" "=r") 5005 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r") 5006 (match_operand:SI 2 "const_int_operand" "n") 5007 (match_operand:SI 3 "const_int_operand" "n")))] 5008 "arm_arch_thumb2 5009 && IN_RANGE (INTVAL (operands[3]), 0, 31) 5010 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))" 5011 "ubfx%?\t%0, %1, %3, %2" 5012 [(set_attr "length" "4") 5013 (set_attr "predicable" "yes") 5014 (set_attr "type" "bfm")] 5015) 5016 5017 5018;; Division instructions 5019(define_insn "divsi3" 5020 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5021 (div:SI (match_operand:SI 1 "s_register_operand" "r,r") 5022 (match_operand:SI 2 "s_register_operand" "r,r")))] 5023 "TARGET_IDIV" 5024 "@ 5025 sdiv%?\t%0, %1, %2 5026 sdiv\t%0, %1, %2" 5027 [(set_attr "arch" "32,v8mb") 5028 (set_attr "predicable" "yes") 5029 (set_attr "type" "sdiv")] 5030) 5031 5032(define_insn "udivsi3" 5033 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5034 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r") 5035 (match_operand:SI 2 
"s_register_operand" "r,r")))] 5036 "TARGET_IDIV" 5037 "@ 5038 udiv%?\t%0, %1, %2 5039 udiv\t%0, %1, %2" 5040 [(set_attr "arch" "32,v8mb") 5041 (set_attr "predicable" "yes") 5042 (set_attr "type" "udiv")] 5043) 5044 5045 5046;; Unary arithmetic insns 5047 5048(define_expand "negv<SIDI:mode>3" 5049 [(match_operand:SIDI 0 "s_register_operand") 5050 (match_operand:SIDI 1 "s_register_operand") 5051 (match_operand 2 "")] 5052 "TARGET_32BIT" 5053{ 5054 emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1], 5055 operands[2])); 5056 DONE; 5057}) 5058 5059(define_expand "negsi2" 5060 [(set (match_operand:SI 0 "s_register_operand") 5061 (neg:SI (match_operand:SI 1 "s_register_operand")))] 5062 "TARGET_EITHER" 5063 "" 5064) 5065 5066(define_insn "*arm_negsi2" 5067 [(set (match_operand:SI 0 "s_register_operand" "=l,r") 5068 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))] 5069 "TARGET_32BIT" 5070 "rsb%?\\t%0, %1, #0" 5071 [(set_attr "predicable" "yes") 5072 (set_attr "predicable_short_it" "yes,no") 5073 (set_attr "arch" "t2,*") 5074 (set_attr "length" "4") 5075 (set_attr "type" "alu_imm")] 5076) 5077 5078;; To keep the comparison in canonical form we express it as (~reg cmp ~0) 5079;; rather than (0 cmp reg). This gives the same results for unsigned 5080;; and equality compares which is what we mostly need here.
;; negsi2_0compare: negate operand 1 into operand 0 while setting the
;; condition codes; the parallel compare is written canonically as
;; (not x) against -1 in CC_RSB mode rather than (0 cmp x).  Emits the
;; 16-bit Thumb-2 "negs" or the ARM "rsbs" form.
;; negsi2_carryin: negation minus an incoming borrow (arm_borrow_operation),
;; as ARM "rsc ..., #0" or the Thumb-2 "sbc %0, %1, %1, lsl #1" trick.
;; negsf2 / negdf2: hard-float FP negation expanders (DF additionally
;; requires TARGET_VFP_DOUBLE).
5081(define_insn "negsi2_0compare" 5082 [(set (reg:CC_RSB CC_REGNUM) 5083 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r")) 5084 (const_int -1))) 5085 (set (match_operand:SI 0 "s_register_operand" "=l,r") 5086 (neg:SI (match_dup 1)))] 5087 "TARGET_32BIT" 5088 "@ 5089 negs\\t%0, %1 5090 rsbs\\t%0, %1, #0" 5091 [(set_attr "conds" "set") 5092 (set_attr "arch" "t2,*") 5093 (set_attr "length" "2,*") 5094 (set_attr "type" "alus_imm")] 5095) 5096 5097(define_insn "negsi2_carryin" 5098 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5099 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r")) 5100 (match_operand:SI 2 "arm_borrow_operation" "")))] 5101 "TARGET_32BIT" 5102 "@ 5103 rsc\\t%0, %1, #0 5104 sbc\\t%0, %1, %1, lsl #1" 5105 [(set_attr "conds" "use") 5106 (set_attr "arch" "a,t2") 5107 (set_attr "type" "adc_imm,adc_reg")] 5108) 5109 5110(define_expand "negsf2" 5111 [(set (match_operand:SF 0 "s_register_operand") 5112 (neg:SF (match_operand:SF 1 "s_register_operand")))] 5113 "TARGET_32BIT && TARGET_HARD_FLOAT" 5114 "" 5115) 5116 5117(define_expand "negdf2" 5118 [(set (match_operand:DF 0 "s_register_operand") 5119 (neg:DF (match_operand:DF 1 "s_register_operand")))] 5120 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE" 5121 "") 5122 5123;; abssi2 doesn't really clobber the condition codes if a different register 5124;; is being set. To keep things simple, assume during rtl manipulations that 5125;; it does, but tell the final scan operator the truth.
;; This section covers: the abssi2 expander (Thumb-1 uses a scratch,
;; otherwise CC_REGNUM is clobbered) and the ARM-mode insn-and-splits
;; *arm_abssi2 / *arm_neg_abssi2, which after reload expand either to a
;; cmp plus conditionally-executed rsb (when dest == src) or to an
;; eor/sub (resp. eor/rsb) pair built from "asr #31"; the hard-float
;; abs/sqrt expanders; one's complement (mvn) patterns including the
;; CC-setting compare-with-zero variants; HF/SF/DF float<->integer
;; conversion expanders (the HF ones go via SFmode using expand_float /
;; expand_fix / convert_to_mode); truncdfsf2 and truncdfhf2 (the latter
;; only via SFmode under flag_unsafe_math_optimizations, to avoid the
;; double rounding described in the comment above it); the DImode
;; zero/sign extension expanders and their splits (low part extended or
;; moved, high part zeroed or filled with asr #31); and the HImode/QImode
;; zero-extension expanders, splits and insns for pre-v4, v4 and v6 ARM.
Similarly for 5126;; (neg (abs...)) 5127 5128(define_expand "abssi2" 5129 [(parallel 5130 [(set (match_operand:SI 0 "s_register_operand") 5131 (abs:SI (match_operand:SI 1 "s_register_operand"))) 5132 (clobber (match_dup 2))])] 5133 "TARGET_EITHER" 5134 " 5135 if (TARGET_THUMB1) 5136 operands[2] = gen_rtx_SCRATCH (SImode); 5137 else 5138 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM); 5139") 5140 5141(define_insn_and_split "*arm_abssi2" 5142 [(set (match_operand:SI 0 "s_register_operand" "=r,&r") 5143 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))) 5144 (clobber (reg:CC CC_REGNUM))] 5145 "TARGET_ARM" 5146 "#" 5147 "&& reload_completed" 5148 [(const_int 0)] 5149 { 5150 /* if (which_alternative == 0) */ 5151 if (REGNO(operands[0]) == REGNO(operands[1])) 5152 { 5153 /* Emit the pattern: 5154 cmp\\t%0, #0\;rsblt\\t%0, %0, #0 5155 [(set (reg:CC CC_REGNUM) 5156 (compare:CC (match_dup 0) (const_int 0))) 5157 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0)) 5158 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))] 5159 */ 5160 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM), 5161 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx))); 5162 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 5163 (gen_rtx_LT (SImode, 5164 gen_rtx_REG (CCmode, CC_REGNUM), 5165 const0_rtx)), 5166 (gen_rtx_SET (operands[0], 5167 (gen_rtx_MINUS (SImode, 5168 const0_rtx, 5169 operands[1])))))); 5170 DONE; 5171 } 5172 else 5173 { 5174 /* Emit the pattern: 5175 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31 5176 [(set (match_dup 0) 5177 (xor:SI (match_dup 1) 5178 (ashiftrt:SI (match_dup 1) (const_int 31)))) 5179 (set (match_dup 0) 5180 (minus:SI (match_dup 0) 5181 (ashiftrt:SI (match_dup 1) (const_int 31))))] 5182 */ 5183 emit_insn (gen_rtx_SET (operands[0], 5184 gen_rtx_XOR (SImode, 5185 gen_rtx_ASHIFTRT (SImode, 5186 operands[1], 5187 GEN_INT (31)), 5188 operands[1]))); 5189 emit_insn (gen_rtx_SET (operands[0], 5190 gen_rtx_MINUS (SImode, 5191 operands[0], 5192 
gen_rtx_ASHIFTRT (SImode, 5193 operands[1], 5194 GEN_INT (31))))); 5195 DONE; 5196 } 5197 } 5198 [(set_attr "conds" "clob,*") 5199 (set_attr "shift" "1") 5200 (set_attr "predicable" "no, yes") 5201 (set_attr "length" "8") 5202 (set_attr "type" "multiple")] 5203) 5204 5205(define_insn_and_split "*arm_neg_abssi2" 5206 [(set (match_operand:SI 0 "s_register_operand" "=r,&r") 5207 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))) 5208 (clobber (reg:CC CC_REGNUM))] 5209 "TARGET_ARM" 5210 "#" 5211 "&& reload_completed" 5212 [(const_int 0)] 5213 { 5214 /* if (which_alternative == 0) */ 5215 if (REGNO (operands[0]) == REGNO (operands[1])) 5216 { 5217 /* Emit the pattern: 5218 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0 5219 */ 5220 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM), 5221 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx))); 5222 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 5223 gen_rtx_GT (SImode, 5224 gen_rtx_REG (CCmode, CC_REGNUM), 5225 const0_rtx), 5226 gen_rtx_SET (operands[0], 5227 (gen_rtx_MINUS (SImode, 5228 const0_rtx, 5229 operands[1]))))); 5230 } 5231 else 5232 { 5233 /* Emit the pattern: 5234 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31 5235 */ 5236 emit_insn (gen_rtx_SET (operands[0], 5237 gen_rtx_XOR (SImode, 5238 gen_rtx_ASHIFTRT (SImode, 5239 operands[1], 5240 GEN_INT (31)), 5241 operands[1]))); 5242 emit_insn (gen_rtx_SET (operands[0], 5243 gen_rtx_MINUS (SImode, 5244 gen_rtx_ASHIFTRT (SImode, 5245 operands[1], 5246 GEN_INT (31)), 5247 operands[0]))); 5248 } 5249 DONE; 5250 } 5251 [(set_attr "conds" "clob,*") 5252 (set_attr "shift" "1") 5253 (set_attr "predicable" "no, yes") 5254 (set_attr "length" "8") 5255 (set_attr "type" "multiple")] 5256) 5257 5258(define_expand "abssf2" 5259 [(set (match_operand:SF 0 "s_register_operand") 5260 (abs:SF (match_operand:SF 1 "s_register_operand")))] 5261 "TARGET_32BIT && TARGET_HARD_FLOAT" 5262 "") 5263 5264(define_expand "absdf2" 5265 [(set (match_operand:DF 0 "s_register_operand") 5266 
(abs:DF (match_operand:DF 1 "s_register_operand")))] 5267 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE" 5268 "") 5269 5270(define_expand "sqrtsf2" 5271 [(set (match_operand:SF 0 "s_register_operand") 5272 (sqrt:SF (match_operand:SF 1 "s_register_operand")))] 5273 "TARGET_32BIT && TARGET_HARD_FLOAT" 5274 "") 5275 5276(define_expand "sqrtdf2" 5277 [(set (match_operand:DF 0 "s_register_operand") 5278 (sqrt:DF (match_operand:DF 1 "s_register_operand")))] 5279 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE" 5280 "") 5281 5282(define_expand "one_cmplsi2" 5283 [(set (match_operand:SI 0 "s_register_operand") 5284 (not:SI (match_operand:SI 1 "s_register_operand")))] 5285 "TARGET_EITHER" 5286 "" 5287) 5288 5289(define_insn "*arm_one_cmplsi2" 5290 [(set (match_operand:SI 0 "s_register_operand" "=l,r") 5291 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))] 5292 "TARGET_32BIT" 5293 "mvn%?\\t%0, %1" 5294 [(set_attr "predicable" "yes") 5295 (set_attr "predicable_short_it" "yes,no") 5296 (set_attr "arch" "t2,*") 5297 (set_attr "length" "4") 5298 (set_attr "type" "mvn_reg")] 5299) 5300 5301(define_insn "*notsi_compare0" 5302 [(set (reg:CC_NZ CC_REGNUM) 5303 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r")) 5304 (const_int 0))) 5305 (set (match_operand:SI 0 "s_register_operand" "=r") 5306 (not:SI (match_dup 1)))] 5307 "TARGET_32BIT" 5308 "mvns%?\\t%0, %1" 5309 [(set_attr "conds" "set") 5310 (set_attr "type" "mvn_reg")] 5311) 5312 5313(define_insn "*notsi_compare0_scratch" 5314 [(set (reg:CC_NZ CC_REGNUM) 5315 (compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r")) 5316 (const_int 0))) 5317 (clobber (match_scratch:SI 0 "=r"))] 5318 "TARGET_32BIT" 5319 "mvns%?\\t%0, %1" 5320 [(set_attr "conds" "set") 5321 (set_attr "type" "mvn_reg")] 5322) 5323 5324;; Fixed <--> Floating conversion insns 5325 5326(define_expand "floatsihf2" 5327 [(set (match_operand:HF 0 "general_operand") 5328 (float:HF (match_operand:SI 1 
"general_operand")))] 5329 "TARGET_EITHER" 5330 " 5331 { 5332 rtx op1 = gen_reg_rtx (SFmode); 5333 expand_float (op1, operands[1], 0); 5334 op1 = convert_to_mode (HFmode, op1, 0); 5335 emit_move_insn (operands[0], op1); 5336 DONE; 5337 }" 5338) 5339 5340(define_expand "floatdihf2" 5341 [(set (match_operand:HF 0 "general_operand") 5342 (float:HF (match_operand:DI 1 "general_operand")))] 5343 "TARGET_EITHER" 5344 " 5345 { 5346 rtx op1 = gen_reg_rtx (SFmode); 5347 expand_float (op1, operands[1], 0); 5348 op1 = convert_to_mode (HFmode, op1, 0); 5349 emit_move_insn (operands[0], op1); 5350 DONE; 5351 }" 5352) 5353 5354(define_expand "floatsisf2" 5355 [(set (match_operand:SF 0 "s_register_operand") 5356 (float:SF (match_operand:SI 1 "s_register_operand")))] 5357 "TARGET_32BIT && TARGET_HARD_FLOAT" 5358 " 5359") 5360 5361(define_expand "floatsidf2" 5362 [(set (match_operand:DF 0 "s_register_operand") 5363 (float:DF (match_operand:SI 1 "s_register_operand")))] 5364 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE" 5365 " 5366") 5367 5368(define_expand "fix_trunchfsi2" 5369 [(set (match_operand:SI 0 "general_operand") 5370 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))] 5371 "TARGET_EITHER" 5372 " 5373 { 5374 rtx op1 = convert_to_mode (SFmode, operands[1], 0); 5375 expand_fix (operands[0], op1, 0); 5376 DONE; 5377 }" 5378) 5379 5380(define_expand "fix_trunchfdi2" 5381 [(set (match_operand:DI 0 "general_operand") 5382 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))] 5383 "TARGET_EITHER" 5384 " 5385 { 5386 rtx op1 = convert_to_mode (SFmode, operands[1], 0); 5387 expand_fix (operands[0], op1, 0); 5388 DONE; 5389 }" 5390) 5391 5392(define_expand "fix_truncsfsi2" 5393 [(set (match_operand:SI 0 "s_register_operand") 5394 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))] 5395 "TARGET_32BIT && TARGET_HARD_FLOAT" 5396 " 5397") 5398 5399(define_expand "fix_truncdfsi2" 5400 [(set (match_operand:SI 0 "s_register_operand") 5401 (fix:SI (fix:DF 
(match_operand:DF 1 "s_register_operand"))))] 5402 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE" 5403 " 5404") 5405 5406;; Truncation insns 5407 5408(define_expand "truncdfsf2" 5409 [(set (match_operand:SF 0 "s_register_operand") 5410 (float_truncate:SF 5411 (match_operand:DF 1 "s_register_operand")))] 5412 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE" 5413 "" 5414) 5415 5416;; DFmode to HFmode conversions on targets without a single-step hardware 5417;; instruction for it would have to go through SFmode. This is dangerous 5418;; as it introduces double rounding. 5419;; 5420;; Disable this pattern unless we are in an unsafe math mode, or we have 5421;; a single-step instruction. 5422 5423(define_expand "truncdfhf2" 5424 [(set (match_operand:HF 0 "s_register_operand") 5425 (float_truncate:HF 5426 (match_operand:DF 1 "s_register_operand")))] 5427 "(TARGET_EITHER && flag_unsafe_math_optimizations) 5428 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)" 5429{ 5430 /* We don't have a direct instruction for this, so we must be in 5431 an unsafe math mode, and going via SFmode. */ 5432 5433 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE)) 5434 { 5435 rtx op1; 5436 op1 = convert_to_mode (SFmode, operands[1], 0); 5437 op1 = convert_to_mode (HFmode, op1, 0); 5438 emit_move_insn (operands[0], op1); 5439 DONE; 5440 } 5441 /* Otherwise, we will pick this up as a single instruction with 5442 no intermediary rounding. */ 5443} 5444) 5445 5446;; Zero and sign extension instructions. 5447 5448(define_expand "zero_extend<mode>di2" 5449 [(set (match_operand:DI 0 "s_register_operand" "") 5450 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))] 5451 "TARGET_32BIT <qhs_zextenddi_cond>" 5452 { 5453 rtx res_lo, res_hi, op0_lo, op0_hi; 5454 res_lo = gen_lowpart (SImode, operands[0]); 5455 res_hi = gen_highpart (SImode, operands[0]); 5456 if (can_create_pseudo_p ()) 5457 { 5458 op0_lo = <MODE>mode == SImode ? 
operands[1] : gen_reg_rtx (SImode); 5459 op0_hi = gen_reg_rtx (SImode); 5460 } 5461 else 5462 { 5463 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo; 5464 op0_hi = res_hi; 5465 } 5466 if (<MODE>mode != SImode) 5467 emit_insn (gen_rtx_SET (op0_lo, 5468 gen_rtx_ZERO_EXTEND (SImode, operands[1]))); 5469 emit_insn (gen_movsi (op0_hi, const0_rtx)); 5470 if (res_lo != op0_lo) 5471 emit_move_insn (res_lo, op0_lo); 5472 if (res_hi != op0_hi) 5473 emit_move_insn (res_hi, op0_hi); 5474 DONE; 5475 } 5476) 5477 5478(define_expand "extend<mode>di2" 5479 [(set (match_operand:DI 0 "s_register_operand" "") 5480 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))] 5481 "TARGET_32BIT <qhs_sextenddi_cond>" 5482 { 5483 rtx res_lo, res_hi, op0_lo, op0_hi; 5484 res_lo = gen_lowpart (SImode, operands[0]); 5485 res_hi = gen_highpart (SImode, operands[0]); 5486 if (can_create_pseudo_p ()) 5487 { 5488 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode); 5489 op0_hi = gen_reg_rtx (SImode); 5490 } 5491 else 5492 { 5493 op0_lo = <MODE>mode == SImode ? 
operands[1] : res_lo; 5494 op0_hi = res_hi; 5495 } 5496 if (<MODE>mode != SImode) 5497 emit_insn (gen_rtx_SET (op0_lo, 5498 gen_rtx_SIGN_EXTEND (SImode, operands[1]))); 5499 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31))); 5500 if (res_lo != op0_lo) 5501 emit_move_insn (res_lo, op0_lo); 5502 if (res_hi != op0_hi) 5503 emit_move_insn (res_hi, op0_hi); 5504 DONE; 5505 } 5506) 5507 5508;; Splits for all extensions to DImode 5509(define_split 5510 [(set (match_operand:DI 0 "s_register_operand" "") 5511 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))] 5512 "TARGET_32BIT" 5513 [(set (match_dup 0) (match_dup 1))] 5514{ 5515 rtx lo_part = gen_lowpart (SImode, operands[0]); 5516 machine_mode src_mode = GET_MODE (operands[1]); 5517 5518 if (src_mode == SImode) 5519 emit_move_insn (lo_part, operands[1]); 5520 else 5521 emit_insn (gen_rtx_SET (lo_part, 5522 gen_rtx_ZERO_EXTEND (SImode, operands[1]))); 5523 operands[0] = gen_highpart (SImode, operands[0]); 5524 operands[1] = const0_rtx; 5525}) 5526 5527(define_split 5528 [(set (match_operand:DI 0 "s_register_operand" "") 5529 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))] 5530 "TARGET_32BIT" 5531 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))] 5532{ 5533 rtx lo_part = gen_lowpart (SImode, operands[0]); 5534 machine_mode src_mode = GET_MODE (operands[1]); 5535 5536 if (src_mode == SImode) 5537 emit_move_insn (lo_part, operands[1]); 5538 else 5539 emit_insn (gen_rtx_SET (lo_part, 5540 gen_rtx_SIGN_EXTEND (SImode, operands[1]))); 5541 operands[1] = lo_part; 5542 operands[0] = gen_highpart (SImode, operands[0]); 5543}) 5544 5545(define_expand "zero_extendhisi2" 5546 [(set (match_operand:SI 0 "s_register_operand") 5547 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))] 5548 "TARGET_EITHER" 5549{ 5550 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1])) 5551 { 5552 emit_insn (gen_movhi_bytes (operands[0], operands[1])); 5553 DONE; 5554 } 5555 if (!arm_arch6 && !MEM_P 
(operands[1])) 5556 { 5557 rtx t = gen_lowpart (SImode, operands[1]); 5558 rtx tmp = gen_reg_rtx (SImode); 5559 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16))); 5560 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16))); 5561 DONE; 5562 } 5563}) 5564 5565(define_split 5566 [(set (match_operand:SI 0 "s_register_operand" "") 5567 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))] 5568 "!TARGET_THUMB2 && !arm_arch6" 5569 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16))) 5570 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))] 5571{ 5572 operands[2] = gen_lowpart (SImode, operands[1]); 5573}) 5574 5575(define_insn "*arm_zero_extendhisi2" 5576 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5577 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))] 5578 "TARGET_ARM && arm_arch4 && !arm_arch6" 5579 "@ 5580 # 5581 ldrh%?\\t%0, %1" 5582 [(set_attr "type" "alu_shift_reg,load_byte") 5583 (set_attr "predicable" "yes")] 5584) 5585 5586(define_insn "*arm_zero_extendhisi2_v6" 5587 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5588 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))] 5589 "TARGET_ARM && arm_arch6" 5590 "@ 5591 uxth%?\\t%0, %1 5592 ldrh%?\\t%0, %1" 5593 [(set_attr "predicable" "yes") 5594 (set_attr "type" "extend,load_byte")] 5595) 5596 5597(define_insn "*arm_zero_extendhisi2addsi" 5598 [(set (match_operand:SI 0 "s_register_operand" "=r") 5599 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r")) 5600 (match_operand:SI 2 "s_register_operand" "r")))] 5601 "TARGET_INT_SIMD" 5602 "uxtah%?\\t%0, %2, %1" 5603 [(set_attr "type" "alu_shift_reg") 5604 (set_attr "predicable" "yes")] 5605) 5606 5607(define_expand "zero_extendqisi2" 5608 [(set (match_operand:SI 0 "s_register_operand") 5609 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))] 5610 "TARGET_EITHER" 5611{ 5612 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1])) 5613 { 5614 emit_insn (gen_andsi3 
(operands[0], 5615 gen_lowpart (SImode, operands[1]), 5616 GEN_INT (255))); 5617 DONE; 5618 } 5619 if (!arm_arch6 && !MEM_P (operands[1])) 5620 { 5621 rtx t = gen_lowpart (SImode, operands[1]); 5622 rtx tmp = gen_reg_rtx (SImode); 5623 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24))); 5624 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24))); 5625 DONE; 5626 } 5627}) 5628 5629(define_split 5630 [(set (match_operand:SI 0 "s_register_operand" "") 5631 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))] 5632 "!arm_arch6" 5633 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24))) 5634 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))] 5635{ 5636 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0); 5637 if (TARGET_ARM) 5638 { 5639 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255))); 5640 DONE; 5641 } 5642}) 5643 5644(define_insn "*arm_zero_extendqisi2" 5645 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5646 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))] 5647 "TARGET_ARM && !arm_arch6" 5648 "@ 5649 # 5650 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2" 5651 [(set_attr "length" "8,4") 5652 (set_attr "type" "alu_shift_reg,load_byte") 5653 (set_attr "predicable" "yes")] 5654) 5655 5656(define_insn "*arm_zero_extendqisi2_v6" 5657 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5658 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))] 5659 "TARGET_ARM && arm_arch6" 5660 "@ 5661 uxtb%?\\t%0, %1 5662 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2" 5663 [(set_attr "type" "extend,load_byte") 5664 (set_attr "predicable" "yes")] 5665) 5666 5667(define_insn "*arm_zero_extendqisi2addsi" 5668 [(set (match_operand:SI 0 "s_register_operand" "=r") 5669 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r")) 5670 (match_operand:SI 2 "s_register_operand" "r")))] 5671 "TARGET_INT_SIMD" 5672 "uxtab%?\\t%0, %2, %1" 5673 [(set_attr "predicable" "yes") 5674 (set_attr "type" 
"alu_shift_reg")] 5675) 5676 5677(define_split 5678 [(set (match_operand:SI 0 "s_register_operand" "") 5679 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0))) 5680 (clobber (match_operand:SI 2 "s_register_operand" ""))] 5681 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN" 5682 [(set (match_dup 2) (match_dup 1)) 5683 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))] 5684 "" 5685) 5686 5687(define_split 5688 [(set (match_operand:SI 0 "s_register_operand" "") 5689 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3))) 5690 (clobber (match_operand:SI 2 "s_register_operand" ""))] 5691 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN" 5692 [(set (match_dup 2) (match_dup 1)) 5693 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))] 5694 "" 5695) 5696 5697 5698(define_split 5699 [(set (match_operand:SI 0 "s_register_operand" "") 5700 (IOR_XOR:SI (and:SI (ashift:SI 5701 (match_operand:SI 1 "s_register_operand" "") 5702 (match_operand:SI 2 "const_int_operand" "")) 5703 (match_operand:SI 3 "const_int_operand" "")) 5704 (zero_extend:SI 5705 (match_operator 5 "subreg_lowpart_operator" 5706 [(match_operand:SI 4 "s_register_operand" "")]))))] 5707 "TARGET_32BIT 5708 && (UINTVAL (operands[3]) 5709 == (GET_MODE_MASK (GET_MODE (operands[5])) 5710 & (GET_MODE_MASK (GET_MODE (operands[5])) 5711 << (INTVAL (operands[2])))))" 5712 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2)) 5713 (match_dup 4))) 5714 (set (match_dup 0) (zero_extend:SI (match_dup 5)))] 5715 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);" 5716) 5717 5718(define_insn "*compareqi_eq0" 5719 [(set (reg:CC_Z CC_REGNUM) 5720 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r") 5721 (const_int 0)))] 5722 "TARGET_32BIT" 5723 "tst%?\\t%0, #255" 5724 [(set_attr "conds" "set") 5725 (set_attr "predicable" "yes") 5726 (set_attr "type" "logic_imm")] 5727) 5728 5729(define_expand "extendhisi2" 5730 [(set (match_operand:SI 0 
"s_register_operand") 5731 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))] 5732 "TARGET_EITHER" 5733{ 5734 if (TARGET_THUMB1) 5735 { 5736 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1])); 5737 DONE; 5738 } 5739 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4) 5740 { 5741 emit_insn (gen_extendhisi2_mem (operands[0], operands[1])); 5742 DONE; 5743 } 5744 5745 if (!arm_arch6 && !MEM_P (operands[1])) 5746 { 5747 rtx t = gen_lowpart (SImode, operands[1]); 5748 rtx tmp = gen_reg_rtx (SImode); 5749 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16))); 5750 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16))); 5751 DONE; 5752 } 5753}) 5754 5755(define_split 5756 [(parallel 5757 [(set (match_operand:SI 0 "register_operand" "") 5758 (sign_extend:SI (match_operand:HI 1 "register_operand" ""))) 5759 (clobber (match_scratch:SI 2 ""))])] 5760 "!arm_arch6" 5761 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16))) 5762 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))] 5763{ 5764 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0); 5765}) 5766 5767;; This pattern will only be used when ldsh is not available 5768(define_expand "extendhisi2_mem" 5769 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" ""))) 5770 (set (match_dup 3) 5771 (zero_extend:SI (match_dup 7))) 5772 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24))) 5773 (set (match_operand:SI 0 "" "") 5774 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))] 5775 "TARGET_ARM" 5776 " 5777 { 5778 rtx mem1, mem2; 5779 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0)); 5780 5781 mem1 = change_address (operands[1], QImode, addr); 5782 mem2 = change_address (operands[1], QImode, 5783 plus_constant (Pmode, addr, 1)); 5784 operands[0] = gen_lowpart (SImode, operands[0]); 5785 operands[1] = mem1; 5786 operands[2] = gen_reg_rtx (SImode); 5787 operands[3] = gen_reg_rtx (SImode); 5788 operands[6] = gen_reg_rtx (SImode); 5789 
operands[7] = mem2; 5790 5791 if (BYTES_BIG_ENDIAN) 5792 { 5793 operands[4] = operands[2]; 5794 operands[5] = operands[3]; 5795 } 5796 else 5797 { 5798 operands[4] = operands[3]; 5799 operands[5] = operands[2]; 5800 } 5801 }" 5802) 5803 5804(define_split 5805 [(set (match_operand:SI 0 "register_operand" "") 5806 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))] 5807 "!arm_arch6" 5808 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16))) 5809 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))] 5810{ 5811 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0); 5812}) 5813 5814(define_insn "*arm_extendhisi2" 5815 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5816 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))] 5817 "TARGET_ARM && arm_arch4 && !arm_arch6" 5818 "@ 5819 # 5820 ldrsh%?\\t%0, %1" 5821 [(set_attr "length" "8,4") 5822 (set_attr "type" "alu_shift_reg,load_byte") 5823 (set_attr "predicable" "yes")] 5824) 5825 5826;; ??? 
;; Sign-extension patterns for v6+ and the QImode extenders: the v6
;; sxth/ldrsh insn, the TARGET_INT_SIMD sxtah/sxtab extend-and-add
;; forms, extendqihi2 (ARM-mode, built from a 24-bit shift pair or an
;; arch4 ldrsb from memory), the ldrsb-only *arm_extendqihi_insn, and
;; the extendqisi2 expander with its pre-arch6 shift-pair split and the
;; pre-v6 / v6 ARM insns (# placeholder split vs. sxtb/ldrsb).
Check Thumb-2 pool range 5827(define_insn "*arm_extendhisi2_v6" 5828 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5829 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))] 5830 "TARGET_32BIT && arm_arch6" 5831 "@ 5832 sxth%?\\t%0, %1 5833 ldrsh%?\\t%0, %1" 5834 [(set_attr "type" "extend,load_byte") 5835 (set_attr "predicable" "yes")] 5836) 5837 5838(define_insn "*arm_extendhisi2addsi" 5839 [(set (match_operand:SI 0 "s_register_operand" "=r") 5840 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r")) 5841 (match_operand:SI 2 "s_register_operand" "r")))] 5842 "TARGET_INT_SIMD" 5843 "sxtah%?\\t%0, %2, %1" 5844 [(set_attr "type" "alu_shift_reg")] 5845) 5846 5847(define_expand "extendqihi2" 5848 [(set (match_dup 2) 5849 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op") 5850 (const_int 24))) 5851 (set (match_operand:HI 0 "s_register_operand") 5852 (ashiftrt:SI (match_dup 2) 5853 (const_int 24)))] 5854 "TARGET_ARM" 5855 " 5856 { 5857 if (arm_arch4 && MEM_P (operands[1])) 5858 { 5859 emit_insn (gen_rtx_SET (operands[0], 5860 gen_rtx_SIGN_EXTEND (HImode, operands[1]))); 5861 DONE; 5862 } 5863 if (!s_register_operand (operands[1], QImode)) 5864 operands[1] = copy_to_mode_reg (QImode, operands[1]); 5865 operands[0] = gen_lowpart (SImode, operands[0]); 5866 operands[1] = gen_lowpart (SImode, operands[1]); 5867 operands[2] = gen_reg_rtx (SImode); 5868 }" 5869) 5870 5871(define_insn "*arm_extendqihi_insn" 5872 [(set (match_operand:HI 0 "s_register_operand" "=r") 5873 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))] 5874 "TARGET_ARM && arm_arch4" 5875 "ldrsb%?\\t%0, %1" 5876 [(set_attr "type" "load_byte") 5877 (set_attr "predicable" "yes")] 5878) 5879 5880(define_expand "extendqisi2" 5881 [(set (match_operand:SI 0 "s_register_operand") 5882 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))] 5883 "TARGET_EITHER" 5884{ 5885 if (!arm_arch4 && MEM_P (operands[1])) 5886 operands[1] = 
copy_to_mode_reg (QImode, operands[1]); 5887 5888 if (!arm_arch6 && !MEM_P (operands[1])) 5889 { 5890 rtx t = gen_lowpart (SImode, operands[1]); 5891 rtx tmp = gen_reg_rtx (SImode); 5892 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24))); 5893 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24))); 5894 DONE; 5895 } 5896}) 5897 5898(define_split 5899 [(set (match_operand:SI 0 "register_operand" "") 5900 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))] 5901 "!arm_arch6" 5902 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24))) 5903 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))] 5904{ 5905 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0); 5906}) 5907 5908(define_insn "*arm_extendqisi" 5909 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5910 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))] 5911 "TARGET_ARM && arm_arch4 && !arm_arch6" 5912 "@ 5913 # 5914 ldrsb%?\\t%0, %1" 5915 [(set_attr "length" "8,4") 5916 (set_attr "type" "alu_shift_reg,load_byte") 5917 (set_attr "predicable" "yes")] 5918) 5919 5920(define_insn "*arm_extendqisi_v6" 5921 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 5922 (sign_extend:SI 5923 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))] 5924 "TARGET_ARM && arm_arch6" 5925 "@ 5926 sxtb%?\\t%0, %1 5927 ldrsb%?\\t%0, %1" 5928 [(set_attr "type" "extend,load_byte") 5929 (set_attr "predicable" "yes")] 5930) 5931 5932(define_insn "*arm_extendqisi2addsi" 5933 [(set (match_operand:SI 0 "s_register_operand" "=r") 5934 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r")) 5935 (match_operand:SI 2 "s_register_operand" "r")))] 5936 "TARGET_INT_SIMD" 5937 "sxtab%?\\t%0, %2, %1" 5938 [(set_attr "type" "alu_shift_reg") 5939 (set_attr "predicable" "yes")] 5940) 5941 5942(define_insn "arm_<sup>xtb16" 5943 [(set (match_operand:SI 0 "s_register_operand" "=r") 5944 (unspec:SI 5945 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))] 
  "TARGET_INT_SIMD"
  "<sup>xtb16%?\\t%0, %1"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_dsp_reg")])

;; Two-operand SIMD32 DSP operations that neither read nor set the
;; GE/Q flags (iterator SIMD32_NOGE_BINOP supplies the mnemonic).
(define_insn "arm_<simd32_op>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
  "TARGET_INT_SIMD"
  "<simd32_op>%?\\t%0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_dsp_reg")])

;; usada8: unsigned sum of absolute differences of four byte pairs,
;; accumulated into operand 3.
(define_insn "arm_usada8"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "s_register_operand" "r")
	   (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
  "TARGET_INT_SIMD"
  "usada8%?\\t%0, %1, %2, %3"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_dsp_reg")])

;; SIMD32 operations accumulating into a 64-bit value (e.g. smlald
;; family); operand 3 is tied to the DImode destination ("0").
(define_insn "arm_<simd32_op>"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec:DI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "s_register_operand" "r")
	   (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
  "TARGET_INT_SIMD"
  "<simd32_op>%?\\t%Q0, %R0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "type" "smlald")])

;; SIMD32 operations that also update the APSR GE bits; the second set
;; models the GE-bit side effect so later "sel" users see it.
(define_insn "arm_<simd32_op>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
   (set (reg:CC APSRGE_REGNUM)
	(unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
  "TARGET_INT_SIMD"
  "<simd32_op>%?\\t%0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_sreg")])

;; Three-operand SIMD32 operations that may set the Q (saturation) flag;
;; <add_clobber_q_name>/<add_clobber_q_pred> select between the plain
;; and Q-setting variants.
(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "s_register_operand" "r")
	   (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
  "TARGET_INT_SIMD && <add_clobber_q_pred>"
  "<simd32_op>%?\\t%0, %1, %2, %3"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_sreg")])

;; Dispatch to the _setq variant when something reads the Q bit, so the
;; flag side effect is represented; otherwise use the plain insn.
(define_expand "arm_<simd32_op>"
  [(set (match_operand:SI 0 "s_register_operand")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand")
	   (match_operand:SI 2 "s_register_operand")
	   (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
  "TARGET_INT_SIMD"
  {
    if (ARM_Q_BIT_READ)
      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
						operands[2], operands[3]));
    else
      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
					   operands[2], operands[3]));
    DONE;
  }
)

;; Two-operand SIMD32 operations that may set the Q flag.
(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
  "TARGET_INT_SIMD && <add_clobber_q_pred>"
  "<simd32_op>%?\\t%0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_sreg")])

;; As above: choose the _setq variant when the Q bit is read.
(define_expand "arm_<simd32_op>"
  [(set (match_operand:SI 0 "s_register_operand")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand")
	   (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
  "TARGET_INT_SIMD"
  {
    if (ARM_Q_BIT_READ)
      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
						operands[2]));
    else
      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
					   operands[2]));
    DONE;
  }
)

;; ssat16/usat16: saturate each halfword of operand 1 to the bit-width
;; given by immediate operand 2.  Note the operand order in the output
;; template: the immediate (%2) precedes the source register (%1).
(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
  "TARGET_INT_SIMD && <add_clobber_q_pred>"
  "<simd32_op>%?\\t%0, %2, %1"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_sreg")])

;; As above: choose the _setq variant when the Q bit is read.
(define_expand "arm_<simd32_op>"
  [(set (match_operand:SI 0 "s_register_operand")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand")
	   (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
  "TARGET_INT_SIMD"
  {
    if (ARM_Q_BIT_READ)
      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
						operands[2]));
    else
      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
					   operands[2]));
    DONE;
  }
)

;; sel: select each byte of the result from operand 1 or operand 2
;; according to the APSR GE bits set by an earlier SIMD32_GE operation.
(define_insn "arm_sel"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "s_register_operand" "r")
	   (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
  "TARGET_INT_SIMD"
  "sel%?\\t%0, %1, %2"
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_sreg")])

;; SFmode -> DFmode extension; only valid with double-precision VFP.
(define_expand "extendsfdf2"
  [(set (match_operand:DF 0 "s_register_operand")
	(float_extend:DF (match_operand:SF 1 "s_register_operand")))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  ""
)

;; HFmode -> DFmode conversions where we don't have an instruction for it
;; must go through SFmode.
;;
;; This is always safe for an extend.

(define_expand "extendhfdf2"
  [(set (match_operand:DF 0 "s_register_operand")
	(float_extend:DF (match_operand:HF 1 "s_register_operand")))]
  "TARGET_EITHER"
{
  /* We don't have a direct instruction for this, so go via SFmode.
     */
  if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
    {
      rtx op1;
      op1 = convert_to_mode (SFmode, operands[1], 0);
      op1 = convert_to_mode (DFmode, op1, 0);
      emit_insn (gen_movdf (operands[0], op1));
      DONE;
    }
  /* Otherwise, we're done producing RTL and will pick up the correct
     pattern to do this with one rounding-step in a single instruction.  */
}
)

;; Move insns (including loads and stores)

;; XXX Just some ideas about movti.
;; I don't think these are a good idea on the arm, there just aren't enough
;; registers
;;(define_expand "loadti"
;;  [(set (match_operand:TI 0 "s_register_operand")
;;	(mem:TI (match_operand:SI 1 "address_operand")))]
;;  "" "")

;;(define_expand "storeti"
;;  [(set (mem:TI (match_operand:TI 0 "address_operand"))
;;	(match_operand:TI 1 "s_register_operand"))]
;;  "" "")

;;(define_expand "movti"
;;  [(set (match_operand:TI 0 "general_operand")
;;	(match_operand:TI 1 "general_operand"))]
;;  ""
;;  "
;;{
;;  rtx insn;
;;
;;  if (MEM_P (operands[0]) && MEM_P (operands[1]))
;;    operands[1] = copy_to_reg (operands[1]);
;;  if (MEM_P (operands[0]))
;;    insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
;;  else if (MEM_P (operands[1]))
;;    insn = gen_loadti (operands[0], XEXP (operands[1], 0));
;;  else
;;    FAIL;
;;
;;  emit_insn (insn);
;;  DONE;
;;}")

;; Recognize garbage generated above.

;;(define_insn ""
;;  [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
;;	(match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
;;  ""
;;  "*
;;  {
;;    register mem = (which_alternative < 3);
;;    register const char *template;
;;
;;    operands[mem] = XEXP (operands[mem], 0);
;;    switch (which_alternative)
;;      {
;;      case 0: template = \"ldmdb\\t%1!, %M0\"; break;
;;      case 1: template = \"ldmia\\t%1!, %M0\"; break;
;;      case 2: template = \"ldmia\\t%1, %M0\"; break;
;;      case 3: template = \"stmdb\\t%0!, %M1\"; break;
;;      case 4: template = \"stmia\\t%0!, %M1\"; break;
;;      case 5: template = \"stmia\\t%0, %M1\"; break;
;;      }
;;    output_asm_insn (template, operands);
;;    return \"\";
;;  }")

;; DImode move expander.  Legalizes operands and, when the destination or
;; source is a core register pair that cannot hold DImode (odd-numbered
;; pair in ARM state), splits the move into two SImode word moves,
;; staging volatile memory accesses through a legal register pair.
(define_expand "movdi"
  [(set (match_operand:DI 0 "general_operand")
	(match_operand:DI 1 "general_operand"))]
  "TARGET_EITHER"
  "
  gcc_checking_assert (aligned_operand (operands[0], DImode));
  gcc_checking_assert (aligned_operand (operands[1], DImode));
  if (can_create_pseudo_p ())
    {
      if (!REG_P (operands[0]))
	operands[1] = force_reg (DImode, operands[1]);
    }
  if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
      && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
    {
      /* Avoid LDRD's into an odd-numbered register pair in ARM state
	 when expanding function calls.  */
      gcc_assert (can_create_pseudo_p ());
      if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
	{
	  /* Perform load into legal reg pair first, then move.
	     */
	  rtx reg = gen_reg_rtx (DImode);
	  emit_insn (gen_movdi (reg, operands[1]));
	  operands[1] = reg;
	}
      emit_move_insn (gen_lowpart (SImode, operands[0]),
		      gen_lowpart (SImode, operands[1]));
      emit_move_insn (gen_highpart (SImode, operands[0]),
		      gen_highpart (SImode, operands[1]));
      DONE;
    }
  else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
	   && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
    {
      /* Avoid STRD's from an odd-numbered register pair in ARM state
	 when expanding function prologue.  */
      gcc_assert (can_create_pseudo_p ());
      rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
		       ? gen_reg_rtx (DImode)
		       : operands[0];
      emit_move_insn (gen_lowpart (SImode, split_dest),
		      gen_lowpart (SImode, operands[1]));
      emit_move_insn (gen_highpart (SImode, split_dest),
		      gen_highpart (SImode, operands[1]));
      if (split_dest != operands[0])
	emit_insn (gen_movdi (operands[0], split_dest));
      DONE;
    }
  "
)

;; Core-register DImode move for targets without VFP/MVE/iWMMXt.
;; Register/constant alternatives emit "#" and are split later; the
;; memory alternatives use output_move_double.
(define_insn "*arm_movdi"
  [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
	(match_operand:DI 1 "di_operand"              "rDa,Db,Dc,mi,r"))]
  "TARGET_32BIT
   && !(TARGET_HARD_FLOAT)
   && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
   && !TARGET_IWMMXT
   && (   register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode))"
  "*
  switch (which_alternative)
    {
    case 0:
    case 1:
    case 2:
      return \"#\";
    case 3:
      /* Cannot load it directly, split to load it via MOV / MOVT.  */
      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
	return \"#\";
      /* Fall through.
	 */
    default:
      return output_move_double (operands, true, NULL);
    }
  "
  [(set_attr "length" "8,12,16,8,8")
   (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
   (set_attr "arm_pool_range" "*,*,*,1020,*")
   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
   (set_attr "thumb2_pool_range" "*,*,*,4094,*")
   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
)

;; Split a 64-bit immediate move into two SImode constant loads when
;; inlining the constant is cheap enough (or literal pools are disabled).
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
	(match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_32BIT
   && reload_completed
   && (arm_disable_literal_pool
       || (arm_const_double_inline_cost (operands[1])
	   <= arm_max_const_double_inline_cost ()))"
  [(const_int 0)]
  "
  arm_split_constant (SET, SImode, curr_insn,
		      INTVAL (gen_lowpart (SImode, operands[1])),
		      gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
  arm_split_constant (SET, SImode, curr_insn,
		      INTVAL (gen_highpart_mode (SImode,
						 GET_MODE (operands[0]),
						 operands[1])),
		      gen_highpart (SImode, operands[0]), NULL_RTX, 0);
  DONE;
  "
)

; If optimizing for size, or if we have load delay slots, then
; we want to split the constant into two separate operations.
; In both cases this may split a trivial part into a single data op
; leaving a single complex constant to load.  We can also get longer
; offsets in a LDR which means we get better chances of sharing the pool
; entries.  Finally, we can normally do a better job of scheduling
; LDR instructions than we can with LDM.
; This pattern will only match if the one above did not.
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
	(match_operand:ANY64 1 "const_double_operand" ""))]
  "TARGET_ARM && reload_completed
   && arm_const_double_by_parts (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
				   operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);
  "
)

;; Split a 64-bit register-to-register move into two word moves after
;; reload, reversing the order of the halves if the low destination
;; word would clobber the high source word.
(define_split
  [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
	(match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
  "TARGET_EITHER && reload_completed"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart (SImode, operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);

  /* Handle a partial overlap.  */
  if (rtx_equal_p (operands[0], operands[3]))
    {
      rtx tmp0 = operands[0];
      rtx tmp1 = operands[1];

      operands[0] = operands[2];
      operands[1] = operands[3];
      operands[2] = tmp0;
      operands[3] = tmp1;
    }
  "
)

;; We can't actually do base+index doubleword loads if the index and
;; destination overlap.  Split here so that we at least have chance to
;; schedule.
(define_split
  [(set (match_operand:DI 0 "s_register_operand" "")
	(mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "s_register_operand" ""))))]
  "TARGET_LDRD
  && reg_overlap_mentioned_p (operands[0], operands[1])
  && reg_overlap_mentioned_p (operands[0], operands[2])"
  [(set (match_dup 4)
	(plus:SI (match_dup 1)
		 (match_dup 2)))
   (set (match_dup 0)
	(mem:DI (match_dup 4)))]
  "
  /* Compute the address into the low word of the destination pair,
     which is known not to be needed until the load itself.  */
  operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
  "
)

;; SImode move expander.  Legalizes mem/const combinations, splits
;; awkward constants, and rewrites TLS and PIC symbolic operands.
(define_expand "movsi"
  [(set (match_operand:SI 0 "general_operand")
	(match_operand:SI 1 "general_operand"))]
  "TARGET_EITHER"
  "
  {
    rtx base, offset, tmp;

    gcc_checking_assert (aligned_operand (operands[0], SImode));
    gcc_checking_assert (aligned_operand (operands[1], SImode));
    if (TARGET_32BIT || TARGET_HAVE_MOVT)
      {
	/* Everything except mem = const or mem = mem can be done easily.  */
	if (MEM_P (operands[0]))
	  operands[1] = force_reg (SImode, operands[1]);
	if (arm_general_register_operand (operands[0], SImode)
	    && CONST_INT_P (operands[1])
	    && !(const_ok_for_arm (INTVAL (operands[1]))
		 || const_ok_for_arm (~INTVAL (operands[1]))))
	  {
	    if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
	      {
		emit_insn (gen_rtx_SET (operands[0], operands[1]));
		DONE;
	      }
	    else
	      {
		arm_split_constant (SET, SImode, NULL_RTX,
				    INTVAL (operands[1]), operands[0], NULL_RTX,
				    optimize && can_create_pseudo_p ());
		DONE;
	      }
	  }
      }
    else /* Target doesn't have MOVT...  */
      {
	if (can_create_pseudo_p ())
	  {
	    if (!REG_P (operands[0]))
	      operands[1] = force_reg (SImode, operands[1]);
	  }
      }

    split_const (operands[1], &base, &offset);
    if (INTVAL (offset) != 0
	&& targetm.cannot_force_const_mem (SImode, operands[1]))
      {
	tmp = can_create_pseudo_p () ?
	      gen_reg_rtx (SImode) : operands[0];
	emit_move_insn (tmp, base);
	emit_insn (gen_addsi3 (operands[0], tmp, offset));
	DONE;
      }

    tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];

    /* Recognize the case where operand[1] is a reference to thread-local
       data and load its address to a register.  Offsets have been split off
       already.  */
    if (arm_tls_referenced_p (operands[1]))
      operands[1] = legitimize_tls_address (operands[1], tmp);
    else if (flag_pic
	     && (CONSTANT_P (operands[1])
		 || symbol_mentioned_p (operands[1])
		 || label_mentioned_p (operands[1])))
      operands[1] =
	legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
  }
  "
)

;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
;; so this does not matter.
;; movt: writes the upper 16 bits of a symbolic address; operand 1 is
;; tied to the destination ("0"), which already holds the lower half.
(define_insn "*arm_movt"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
	(lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
		   (match_operand:SI 2 "general_operand" "i,i")))]
  "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
  "@
   movt%?\t%0, #:upper16:%c2
   movt\t%0, #:upper16:%c2"
  [(set_attr "arch" "32,v8mb")
   (set_attr "predicable" "yes")
   (set_attr "length" "4")
   (set_attr "type" "alu_sreg")]
)

;; Basic ARM-state SImode move: mov reg/imm, mvn of an inverted
;; immediate, movw (v6t2+), and ldr/str for memory.
(define_insn "*arm_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
	(match_operand:SI 1 "general_operand"      "rk, I,K,j,mi,rk"))]
  "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
   && (   register_operand (operands[0], SImode)
       || register_operand (operands[1], SImode))"
  "@
   mov%?\\t%0, %1
   mov%?\\t%0, %1
   mvn%?\\t%0, #%B1
   movw%?\\t%0, %1
   ldr%?\\t%0, %1
   str%?\\t%1, %0"
  [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
   (set_attr "predicable" "yes")
   (set_attr "arch" "*,*,*,v6t2,*,*")
   (set_attr "pool_range" "*,*,*,*,4096,*")
   (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
)

;; Split a constant that no single mov/mvn can encode into a minimal
;; instruction sequence via arm_split_constant.
(define_split
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "(TARGET_32BIT || TARGET_HAVE_MOVT)
  && (!(const_ok_for_arm (INTVAL (operands[1]))
        || const_ok_for_arm (~INTVAL (operands[1]))))"
  [(clobber (const_int 0))]
  "
  arm_split_constant (SET, SImode, NULL_RTX,
		      INTVAL (operands[1]), operands[0], NULL_RTX, 0);
  DONE;
  "
)

;; A normal way to do (symbol + offset) requires three instructions at least
;; (depends on how big the offset is) as below:
;;      movw r0, #:lower16:g
;;      movw r0, #:upper16:g
;;      adds r0, #4
;;
;; A better way would be:
;;      movw r0, #:lower16:g+4
;;      movw r0, #:upper16:g+4
;;
;; The limitation of this way is that the length of offset should be a 16-bit
;; signed value, because current assembler only supports REL type relocation for
;; such case.  If the more powerful RELA type is supported in future, we should
;; update this pattern to go with better way.
(define_split
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
			   (match_operand:SI 2 "const_int_operand" ""))))]
  "TARGET_THUMB
   && TARGET_HAVE_MOVT
   && arm_disable_literal_pool
   && reload_completed
   && GET_CODE (operands[1]) == SYMBOL_REF"
  [(clobber (const_int 0))]
  "
    int offset = INTVAL (operands[2]);

    /* Offsets outside the 16-bit signed range cannot be folded into the
       relocation, so materialize the symbol and add separately.  */
    if (offset < -0x8000 || offset > 0x7fff)
      {
	arm_emit_movpair (operands[0], operands[1]);
	emit_insn (gen_rtx_SET (operands[0],
				gen_rtx_PLUS (SImode, operands[0], operands[2])));
      }
    else
      {
	rtx op = gen_rtx_CONST (SImode,
				gen_rtx_PLUS (SImode, operands[1], operands[2]));
	arm_emit_movpair (operands[0], op);
      }
  "
)

;; Split symbol_refs at the later stage (after cprop), instead of generating
;; movt/movw pair directly at expand.  Otherwise corresponding high_sum
;; and lo_sum would be merged back into memory load at cprop.  However,
;; if the default is to prefer movt/movw rather than a load from the constant
;; pool, the performance is better.
(define_split
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "general_operand" ""))]
  "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
   && !target_word_relocations
   && !arm_tls_referenced_p (operands[1])"
  [(clobber (const_int 0))]
{
  arm_emit_movpair (operands[0], operands[1]);
  DONE;
})

;; When generating pic, we need to load the symbol offset into a register.
;; So that the optimizer does not confuse this with a normal symbol load
;; we use an unspec.  The offset will be loaded from a constant pool entry,
;; since that is the only type of relocation we can use.

;; Wrap calculation of the whole PIC address in a single pattern for the
;; benefit of optimizers, particularly, PRE and HOIST.
;; Calculation of
;; a PIC address involves two loads from memory, so we want to CSE it
;; as often as possible.
;; This pattern will be split into one of the pic_load_addr_* patterns
;; and a move after GCSE optimizations.
;;
;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
(define_expand "calculate_pic_address"
  [(set (match_operand:SI 0 "register_operand")
	(mem:SI (plus:SI (match_operand:SI 1 "register_operand")
			 (unspec:SI [(match_operand:SI 2 "" "")]
				    UNSPEC_PIC_SYM))))]
  "flag_pic"
)

;; Split calculate_pic_address into pic_load_addr_* and a move.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
			 (unspec:SI [(match_operand:SI 2 "" "")]
				    UNSPEC_PIC_SYM))))]
  "flag_pic"
  [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
   (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
  "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
)

;; operand1 is the memory address to go into
;; pic_load_addr_32bit.
;; operand2 is the PIC label to be emitted
;; from pic_add_dot_plus_eight.
;; We do this to allow hoisting of the entire insn.
(define_insn_and_split "pic_load_addr_unified"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
	(unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
		    (match_operand:SI 2 "" "")]
		   UNSPEC_PIC_UNIFIED))]
  "flag_pic"
  "#"
  "&& reload_completed"
  [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
   (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
				  (match_dup 2)] UNSPEC_PIC_BASE))]
  "operands[3] = TARGET_THUMB ?
		 GEN_INT (4) : GEN_INT (8);"
  [(set_attr "type" "load_4,load_4,load_4")
   (set_attr "pool_range" "4096,4094,1022")
   (set_attr "neg_pool_range" "4084,0,0")
   (set_attr "arch" "a,t2,t1")
   (set_attr "length" "8,6,4")]
)

;; The rather odd constraints on the following are to force reload to leave
;; the insn alone, and to force the minipool generation pass to then move
;; the GOT symbol to memory.

(define_insn "pic_load_addr_32bit"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
  "TARGET_32BIT && flag_pic"
  "ldr%?\\t%0, %1"
  [(set_attr "type" "load_4")
   (set (attr "pool_range")
	(if_then_else (eq_attr "is_thumb" "no")
		      (const_int 4096)
		      (const_int 4094)))
   (set (attr "neg_pool_range")
	(if_then_else (eq_attr "is_thumb" "no")
		      (const_int 4084)
		      (const_int 0)))]
)

;; Thumb-1 variant: low registers only, smaller pool range.
(define_insn "pic_load_addr_thumb1"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
  "TARGET_THUMB1 && flag_pic"
  "ldr\\t%0, %1"
  [(set_attr "type" "load_4")
   (set (attr "pool_range") (const_int 1018))]
)

;; Add the PC (which in Thumb state reads as "." + 4) to the PIC offset;
;; emits the local LPIC label referenced by the pool entry first.
(define_insn "pic_add_dot_plus_four"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(unspec:SI [(match_operand:SI 1 "register_operand" "0")
		    (const_int 4)
		    (match_operand 2 "" "")]
		   UNSPEC_PIC_BASE))]
  "TARGET_THUMB"
  "*
  (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
				     INTVAL (operands[2]));
  return \"add\\t%0, %|pc\";
  "
  [(set_attr "length" "2")
   (set_attr "type" "alu_sreg")]
)

;; ARM-state counterpart: PC reads as "." + 8.
(define_insn "pic_add_dot_plus_eight"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(unspec:SI [(match_operand:SI 1 "register_operand" "r")
		    (const_int 8)
		    (match_operand 2 "" "")]
		   UNSPEC_PIC_BASE))]
  "TARGET_ARM"
  "*
  (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
				     INTVAL (operands[2]));
  return \"add%?\\t%0, %|pc, %1\";
  "
  [(set_attr "predicable" "yes")
   (set_attr "type" "alu_sreg")]
)

;; PC-relative load form of the above: ldr from [pc, reg] in one insn.
(define_insn "tls_load_dot_plus_eight"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
			    (const_int 8)
			    (match_operand 2 "" "")]
			   UNSPEC_PIC_BASE)))]
  "TARGET_ARM"
  "*
  (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
				     INTVAL (operands[2]));
  return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
  "
  [(set_attr "predicable" "yes")
   (set_attr "type" "load_4")]
)

;; PIC references to local variables can generate pic_add_dot_plus_eight
;; followed by a load.  These sequences can be crunched down to
;; tls_load_dot_plus_eight by a peephole.

(define_peephole2
  [(set (match_operand:SI 0 "register_operand" "")
	(unspec:SI [(match_operand:SI 3 "register_operand" "")
		    (const_int 8)
		    (match_operand 1 "" "")]
		   UNSPEC_PIC_BASE))
   (set (match_operand:SI 2 "arm_general_register_operand" "")
	(mem:SI (match_dup 0)))]
  "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
  [(set (match_dup 2)
	(mem:SI (unspec:SI [(match_dup 3)
			    (const_int 8)
			    (match_dup 1)]
			   UNSPEC_PIC_BASE)))]
  ""
)

;; VxWorks RTP: load a PIC offset from [base, offset-symbol].
(define_insn "pic_offset_arm"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
			 (unspec:SI [(match_operand:SI 2 "" "X")]
				    UNSPEC_PIC_OFFSET))))]
  "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
  "ldr%?\\t%0, [%1,%2]"
  [(set_attr "type" "load_4")]
)

;; Reload the PIC register after a builtin setjmp has returned, since
;; the longjmp path does not restore it.
(define_expand "builtin_setjmp_receiver"
  [(label_ref (match_operand 0 "" ""))]
  "flag_pic"
  "
{
  /* r3 is clobbered by set/longjmp, so we can use it as a scratch
     register.
     */
  if (arm_pic_register != INVALID_REGNUM)
    arm_load_pic_register (1UL << 3, NULL_RTX);
  DONE;
}")

;; If copying one reg to another we can set the condition codes according to
;; its value.  Such a move is common after a return from subroutine and the
;; result is being tested against zero.

(define_insn "*movsi_compare0"
  [(set (reg:CC CC_REGNUM)
	(compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
		    (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
	(match_dup 1))]
  "TARGET_32BIT"
  "@
   cmp%?\\t%0, #0
   cmp%?\\t%0, #0
   subs%?\\t%0, %1, #0
   subs%?\\t%0, %1, #0
   subs%?\\t%0, %1, #0"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,*,t2,t2,a")
   (set_attr "type" "alus_imm")
   (set_attr "length" "2,4,2,4,4")]
)

;; Subroutine to store a half word from a register into memory.
;; Operand 0 is the source register (HImode)
;; Operand 1 is the destination address in a register (SImode)

;; In both this routine and the next, we must be careful not to spill
;; a memory address of reg+large_const into a separate PLUS insn, since this
;; can generate unrecognizable rtl.

(define_expand "storehi"
  [;; store the low byte
   (set (match_operand 1 "" "") (match_dup 3))
   ;; extract the high byte
   (set (match_dup 2)
	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
   ;; store the high byte
   (set (match_dup 4) (match_dup 5))]
  "TARGET_ARM"
  "
  {
    rtx op1 = operands[1];
    rtx addr = XEXP (op1, 0);
    enum rtx_code code = GET_CODE (addr);

    /* reg+reg or reg-x addresses cannot take the +1 byte offset below,
       so force the address into a register first.  */
    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
	|| code == MINUS)
      op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));

    operands[4] = adjust_address (op1, QImode, 1);
    operands[1] = adjust_address (operands[1], QImode, 0);
    operands[3] = gen_lowpart (QImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[2] = gen_reg_rtx (SImode);
    operands[5] = gen_lowpart (QImode, operands[2]);
  }"
)

;; Big-endian variant of storehi: the high byte goes to the lower
;; address, so the two byte stores are issued in the opposite order.
(define_expand "storehi_bigend"
  [(set (match_dup 4) (match_dup 3))
   (set (match_dup 2)
	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
   (set (match_operand 1 "" "") (match_dup 5))]
  "TARGET_ARM"
  "
  {
    rtx op1 = operands[1];
    rtx addr = XEXP (op1, 0);
    enum rtx_code code = GET_CODE (addr);

    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
	|| code == MINUS)
      op1 = replace_equiv_address (op1, force_reg (SImode, addr));

    operands[4] = adjust_address (op1, QImode, 1);
    operands[1] = adjust_address (operands[1], QImode, 0);
    operands[3] = gen_lowpart (QImode, operands[0]);
    operands[0] = gen_lowpart (SImode, operands[0]);
    operands[2] = gen_reg_rtx (SImode);
    operands[5] = gen_lowpart (QImode, operands[2]);
  }"
)

;; Subroutine to store a half word integer constant into memory.
(define_expand "storeinthi"
  [(set (match_operand 0 "" "")
	(match_operand 1 "" ""))
   (set (match_dup 3) (match_dup 2))]
  "TARGET_ARM"
  "
  {
    HOST_WIDE_INT value = INTVAL (operands[1]);
    rtx addr = XEXP (operands[0], 0);
    rtx op0 = operands[0];
    enum rtx_code code = GET_CODE (addr);

    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
	|| code == MINUS)
      op0 = replace_equiv_address (op0, force_reg (SImode, addr));

    /* Load each byte of the constant into a register, reusing one
       register when both bytes are equal; byte order depends on
       endianness.  */
    operands[1] = gen_reg_rtx (SImode);
    if (BYTES_BIG_ENDIAN)
      {
	emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
	if ((value & 255) == ((value >> 8) & 255))
	  operands[2] = operands[1];
	else
	  {
	    operands[2] = gen_reg_rtx (SImode);
	    emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
	  }
      }
    else
      {
	emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
	if ((value & 255) == ((value >> 8) & 255))
	  operands[2] = operands[1];
	else
	  {
	    operands[2] = gen_reg_rtx (SImode);
	    emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
	  }
      }

    operands[3] = adjust_address (op0, QImode, 1);
    operands[0] = adjust_address (operands[0], QImode, 0);
    operands[2] = gen_lowpart (QImode, operands[2]);
    operands[1] = gen_lowpart (QImode, operands[1]);
  }"
)

;; Store a halfword with a single strh (ARMv4+); the source must be a
;; core register.
(define_expand "storehi_single_op"
  [(set (match_operand:HI 0 "memory_operand")
	(match_operand:HI 1 "general_operand"))]
  "TARGET_32BIT && arm_arch4"
  "
  if (!s_register_operand (operands[1], HImode))
    operands[1] = copy_to_mode_reg (HImode, operands[1]);
  "
)

;; HImode move expander.
(define_expand "movhi"
  [(set (match_operand:HI 0 "general_operand")
	(match_operand:HI 1 "general_operand"))]
  "TARGET_EITHER"
  "
  gcc_checking_assert (aligned_operand (operands[0], HImode));
  gcc_checking_assert (aligned_operand (operands[1], HImode));
  if
(TARGET_ARM) 6864 { 6865 if (can_create_pseudo_p ()) 6866 { 6867 if (MEM_P (operands[0])) 6868 { 6869 if (arm_arch4) 6870 { 6871 emit_insn (gen_storehi_single_op (operands[0], operands[1])); 6872 DONE; 6873 } 6874 if (CONST_INT_P (operands[1])) 6875 emit_insn (gen_storeinthi (operands[0], operands[1])); 6876 else 6877 { 6878 if (MEM_P (operands[1])) 6879 operands[1] = force_reg (HImode, operands[1]); 6880 if (BYTES_BIG_ENDIAN) 6881 emit_insn (gen_storehi_bigend (operands[1], operands[0])); 6882 else 6883 emit_insn (gen_storehi (operands[1], operands[0])); 6884 } 6885 DONE; 6886 } 6887 /* Sign extend a constant, and keep it in an SImode reg. */ 6888 else if (CONST_INT_P (operands[1])) 6889 { 6890 rtx reg = gen_reg_rtx (SImode); 6891 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff; 6892 6893 /* If the constant is already valid, leave it alone. */ 6894 if (!const_ok_for_arm (val)) 6895 { 6896 /* If setting all the top bits will make the constant 6897 loadable in a single instruction, then set them. 6898 Otherwise, sign extend the number. 
*/ 6899 6900 if (const_ok_for_arm (~(val | ~0xffff))) 6901 val |= ~0xffff; 6902 else if (val & 0x8000) 6903 val |= ~0xffff; 6904 } 6905 6906 emit_insn (gen_movsi (reg, GEN_INT (val))); 6907 operands[1] = gen_lowpart (HImode, reg); 6908 } 6909 else if (arm_arch4 && optimize && can_create_pseudo_p () 6910 && MEM_P (operands[1])) 6911 { 6912 rtx reg = gen_reg_rtx (SImode); 6913 6914 emit_insn (gen_zero_extendhisi2 (reg, operands[1])); 6915 operands[1] = gen_lowpart (HImode, reg); 6916 } 6917 else if (!arm_arch4) 6918 { 6919 if (MEM_P (operands[1])) 6920 { 6921 rtx base; 6922 rtx offset = const0_rtx; 6923 rtx reg = gen_reg_rtx (SImode); 6924 6925 if ((REG_P (base = XEXP (operands[1], 0)) 6926 || (GET_CODE (base) == PLUS 6927 && (CONST_INT_P (offset = XEXP (base, 1))) 6928 && ((INTVAL(offset) & 1) != 1) 6929 && REG_P (base = XEXP (base, 0)))) 6930 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32) 6931 { 6932 rtx new_rtx; 6933 6934 new_rtx = widen_memory_access (operands[1], SImode, 6935 ((INTVAL (offset) & ~3) 6936 - INTVAL (offset))); 6937 emit_insn (gen_movsi (reg, new_rtx)); 6938 if (((INTVAL (offset) & 2) != 0) 6939 ^ (BYTES_BIG_ENDIAN ? 1 : 0)) 6940 { 6941 rtx reg2 = gen_reg_rtx (SImode); 6942 6943 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16))); 6944 reg = reg2; 6945 } 6946 } 6947 else 6948 emit_insn (gen_movhi_bytes (reg, operands[1])); 6949 6950 operands[1] = gen_lowpart (HImode, reg); 6951 } 6952 } 6953 } 6954 /* Handle loading a large integer during reload. */ 6955 else if (CONST_INT_P (operands[1]) 6956 && !const_ok_for_arm (INTVAL (operands[1])) 6957 && !const_ok_for_arm (~INTVAL (operands[1]))) 6958 { 6959 /* Writing a constant to memory needs a scratch, which should 6960 be handled with SECONDARY_RELOADs. 
*/ 6961 gcc_assert (REG_P (operands[0])); 6962 6963 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0); 6964 emit_insn (gen_movsi (operands[0], operands[1])); 6965 DONE; 6966 } 6967 } 6968 else if (TARGET_THUMB2) 6969 { 6970 /* Thumb-2 can do everything except mem=mem and mem=const easily. */ 6971 if (can_create_pseudo_p ()) 6972 { 6973 if (!REG_P (operands[0])) 6974 operands[1] = force_reg (HImode, operands[1]); 6975 /* Zero extend a constant, and keep it in an SImode reg. */ 6976 else if (CONST_INT_P (operands[1])) 6977 { 6978 rtx reg = gen_reg_rtx (SImode); 6979 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff; 6980 6981 emit_insn (gen_movsi (reg, GEN_INT (val))); 6982 operands[1] = gen_lowpart (HImode, reg); 6983 } 6984 } 6985 } 6986 else /* TARGET_THUMB1 */ 6987 { 6988 if (can_create_pseudo_p ()) 6989 { 6990 if (CONST_INT_P (operands[1])) 6991 { 6992 rtx reg = gen_reg_rtx (SImode); 6993 6994 emit_insn (gen_movsi (reg, operands[1])); 6995 operands[1] = gen_lowpart (HImode, reg); 6996 } 6997 6998 /* ??? We shouldn't really get invalid addresses here, but this can 6999 happen if we are passed a SP (never OK for HImode/QImode) or 7000 virtual register (also rejected as illegitimate for HImode/QImode) 7001 relative address. */ 7002 /* ??? This should perhaps be fixed elsewhere, for instance, in 7003 fixup_stack_1, by checking for other kinds of invalid addresses, 7004 e.g. a bare reference to a virtual register. This may confuse the 7005 alpha though, which must handle this case differently. 
*/ 7006 if (MEM_P (operands[0]) 7007 && !memory_address_p (GET_MODE (operands[0]), 7008 XEXP (operands[0], 0))) 7009 operands[0] 7010 = replace_equiv_address (operands[0], 7011 copy_to_reg (XEXP (operands[0], 0))); 7012 7013 if (MEM_P (operands[1]) 7014 && !memory_address_p (GET_MODE (operands[1]), 7015 XEXP (operands[1], 0))) 7016 operands[1] 7017 = replace_equiv_address (operands[1], 7018 copy_to_reg (XEXP (operands[1], 0))); 7019 7020 if (MEM_P (operands[1]) && optimize > 0) 7021 { 7022 rtx reg = gen_reg_rtx (SImode); 7023 7024 emit_insn (gen_zero_extendhisi2 (reg, operands[1])); 7025 operands[1] = gen_lowpart (HImode, reg); 7026 } 7027 7028 if (MEM_P (operands[0])) 7029 operands[1] = force_reg (HImode, operands[1]); 7030 } 7031 else if (CONST_INT_P (operands[1]) 7032 && !satisfies_constraint_I (operands[1])) 7033 { 7034 /* Handle loading a large integer during reload. */ 7035 7036 /* Writing a constant to memory needs a scratch, which should 7037 be handled with SECONDARY_RELOADs. */ 7038 gcc_assert (REG_P (operands[0])); 7039 7040 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0); 7041 emit_insn (gen_movsi (operands[0], operands[1])); 7042 DONE; 7043 } 7044 } 7045 " 7046) 7047 7048(define_expand "movhi_bytes" 7049 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" ""))) 7050 (set (match_dup 3) 7051 (zero_extend:SI (match_dup 6))) 7052 (set (match_operand:SI 0 "" "") 7053 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))] 7054 "TARGET_ARM" 7055 " 7056 { 7057 rtx mem1, mem2; 7058 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0)); 7059 7060 mem1 = change_address (operands[1], QImode, addr); 7061 mem2 = change_address (operands[1], QImode, 7062 plus_constant (Pmode, addr, 1)); 7063 operands[0] = gen_lowpart (SImode, operands[0]); 7064 operands[1] = mem1; 7065 operands[2] = gen_reg_rtx (SImode); 7066 operands[3] = gen_reg_rtx (SImode); 7067 operands[6] = mem2; 7068 7069 if (BYTES_BIG_ENDIAN) 7070 { 7071 operands[4] = operands[2]; 
7072 operands[5] = operands[3]; 7073 } 7074 else 7075 { 7076 operands[4] = operands[3]; 7077 operands[5] = operands[2]; 7078 } 7079 }" 7080) 7081 7082(define_expand "movhi_bigend" 7083 [(set (match_dup 2) 7084 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0) 7085 (const_int 16))) 7086 (set (match_dup 3) 7087 (ashiftrt:SI (match_dup 2) (const_int 16))) 7088 (set (match_operand:HI 0 "s_register_operand") 7089 (match_dup 4))] 7090 "TARGET_ARM" 7091 " 7092 operands[2] = gen_reg_rtx (SImode); 7093 operands[3] = gen_reg_rtx (SImode); 7094 operands[4] = gen_lowpart (HImode, operands[3]); 7095 " 7096) 7097 7098;; Pattern to recognize insn generated default case above 7099(define_insn "*movhi_insn_arch4" 7100 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r") 7101 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))] 7102 "TARGET_ARM 7103 && arm_arch4 && !TARGET_HARD_FLOAT 7104 && (register_operand (operands[0], HImode) 7105 || register_operand (operands[1], HImode))" 7106 "@ 7107 mov%?\\t%0, %1\\t%@ movhi 7108 mvn%?\\t%0, #%B1\\t%@ movhi 7109 movw%?\\t%0, %L1\\t%@ movhi 7110 strh%?\\t%1, %0\\t%@ movhi 7111 ldrh%?\\t%0, %1\\t%@ movhi" 7112 [(set_attr "predicable" "yes") 7113 (set_attr "pool_range" "*,*,*,*,256") 7114 (set_attr "neg_pool_range" "*,*,*,*,244") 7115 (set_attr "arch" "*,*,v6t2,*,*") 7116 (set_attr_alternative "type" 7117 [(if_then_else (match_operand 1 "const_int_operand" "") 7118 (const_string "mov_imm" ) 7119 (const_string "mov_reg")) 7120 (const_string "mvn_imm") 7121 (const_string "mov_imm") 7122 (const_string "store_4") 7123 (const_string "load_4")])] 7124) 7125 7126(define_insn "*movhi_bytes" 7127 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r") 7128 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))] 7129 "TARGET_ARM && !TARGET_HARD_FLOAT" 7130 "@ 7131 mov%?\\t%0, %1\\t%@ movhi 7132 mov%?\\t%0, %1\\t%@ movhi 7133 mvn%?\\t%0, #%B1\\t%@ movhi" 7134 [(set_attr "predicable" "yes") 7135 (set_attr "type" 
"mov_imm,mov_reg,mvn_imm")] 7136) 7137 7138;; We use a DImode scratch because we may occasionally need an additional 7139;; temporary if the address isn't offsettable -- push_reload doesn't seem 7140;; to take any notice of the "o" constraints on reload_memory_operand operand. 7141;; The reload_in<m> and reload_out<m> patterns require special constraints 7142;; to be correctly handled in default_secondary_reload function. 7143(define_expand "reload_outhi" 7144 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o") 7145 (match_operand:HI 1 "s_register_operand" "r") 7146 (match_operand:DI 2 "s_register_operand" "=&l")])] 7147 "TARGET_EITHER" 7148 "if (TARGET_ARM) 7149 arm_reload_out_hi (operands); 7150 else 7151 thumb_reload_out_hi (operands); 7152 DONE; 7153 " 7154) 7155 7156(define_expand "reload_inhi" 7157 [(parallel [(match_operand:HI 0 "s_register_operand" "=r") 7158 (match_operand:HI 1 "arm_reload_memory_operand" "o") 7159 (match_operand:DI 2 "s_register_operand" "=&r")])] 7160 "TARGET_EITHER" 7161 " 7162 if (TARGET_ARM) 7163 arm_reload_in_hi (operands); 7164 else 7165 thumb_reload_out_hi (operands); 7166 DONE; 7167") 7168 7169(define_expand "movqi" 7170 [(set (match_operand:QI 0 "general_operand") 7171 (match_operand:QI 1 "general_operand"))] 7172 "TARGET_EITHER" 7173 " 7174 /* Everything except mem = const or mem = mem can be done easily */ 7175 7176 if (can_create_pseudo_p ()) 7177 { 7178 if (CONST_INT_P (operands[1])) 7179 { 7180 rtx reg = gen_reg_rtx (SImode); 7181 7182 /* For thumb we want an unsigned immediate, then we are more likely 7183 to be able to use a movs insn. */ 7184 if (TARGET_THUMB) 7185 operands[1] = GEN_INT (INTVAL (operands[1]) & 255); 7186 7187 emit_insn (gen_movsi (reg, operands[1])); 7188 operands[1] = gen_lowpart (QImode, reg); 7189 } 7190 7191 if (TARGET_THUMB) 7192 { 7193 /* ??? 
We shouldn't really get invalid addresses here, but this can 7194 happen if we are passed a SP (never OK for HImode/QImode) or 7195 virtual register (also rejected as illegitimate for HImode/QImode) 7196 relative address. */ 7197 /* ??? This should perhaps be fixed elsewhere, for instance, in 7198 fixup_stack_1, by checking for other kinds of invalid addresses, 7199 e.g. a bare reference to a virtual register. This may confuse the 7200 alpha though, which must handle this case differently. */ 7201 if (MEM_P (operands[0]) 7202 && !memory_address_p (GET_MODE (operands[0]), 7203 XEXP (operands[0], 0))) 7204 operands[0] 7205 = replace_equiv_address (operands[0], 7206 copy_to_reg (XEXP (operands[0], 0))); 7207 if (MEM_P (operands[1]) 7208 && !memory_address_p (GET_MODE (operands[1]), 7209 XEXP (operands[1], 0))) 7210 operands[1] 7211 = replace_equiv_address (operands[1], 7212 copy_to_reg (XEXP (operands[1], 0))); 7213 } 7214 7215 if (MEM_P (operands[1]) && optimize > 0) 7216 { 7217 rtx reg = gen_reg_rtx (SImode); 7218 7219 emit_insn (gen_zero_extendqisi2 (reg, operands[1])); 7220 operands[1] = gen_lowpart (QImode, reg); 7221 } 7222 7223 if (MEM_P (operands[0])) 7224 operands[1] = force_reg (QImode, operands[1]); 7225 } 7226 else if (TARGET_THUMB 7227 && CONST_INT_P (operands[1]) 7228 && !satisfies_constraint_I (operands[1])) 7229 { 7230 /* Handle loading a large integer during reload. */ 7231 7232 /* Writing a constant to memory needs a scratch, which should 7233 be handled with SECONDARY_RELOADs. 
*/ 7234 gcc_assert (REG_P (operands[0])); 7235 7236 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0); 7237 emit_insn (gen_movsi (operands[0], operands[1])); 7238 DONE; 7239 } 7240 " 7241) 7242 7243(define_insn "*arm_movqi_insn" 7244 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m") 7245 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))] 7246 "TARGET_32BIT 7247 && ( register_operand (operands[0], QImode) 7248 || register_operand (operands[1], QImode))" 7249 "@ 7250 mov%?\\t%0, %1 7251 mov%?\\t%0, %1 7252 mov%?\\t%0, %1 7253 mov%?\\t%0, %1 7254 mvn%?\\t%0, #%B1 7255 ldrb%?\\t%0, %1 7256 strb%?\\t%1, %0 7257 ldrb%?\\t%0, %1 7258 strb%?\\t%1, %0" 7259 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4") 7260 (set_attr "predicable" "yes") 7261 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no") 7262 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any") 7263 (set_attr "length" "2,4,4,2,4,2,2,4,4")] 7264) 7265 7266;; HFmode and BFmode moves. 
;; HFmode/BFmode move expander.  Stores need the source in a register;
;; Thumb-1 additionally forces a register source for any non-register
;; destination once pseudos are available.
(define_expand "mov<mode>"
  [(set (match_operand:HFBF 0 "general_operand")
	(match_operand:HFBF 1 "general_operand"))]
  "TARGET_EITHER"
  "
  gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
  gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
  if (TARGET_32BIT)
    {
      if (MEM_P (operands[0]))
	operands[1] = force_reg (<MODE>mode, operands[1]);
    }
  else /* TARGET_THUMB1 */
    {
      if (can_create_pseudo_p ())
	{
	  if (!REG_P (operands[0]))
	    operands[1] = force_reg (<MODE>mode, operands[1]);
	}
    }
  "
)

;; Soft-float HF/BF moves in core registers: ldrh/strh for memory,
;; mov for register copies, and constants built either with a single
;; movw (Thumb-2 capable cores) or a mov/orr pair of byte immediates.
(define_insn "*arm32_mov<mode>"
  [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
	(match_operand:HFBF 1 "general_operand"	   " m,r,r,F"))]
  "TARGET_32BIT
   && !TARGET_HARD_FLOAT
   && !TARGET_HAVE_MVE
   && (	  s_register_operand (operands[0], <MODE>mode)
       || s_register_operand (operands[1], <MODE>mode))"
  "*
  switch (which_alternative)
    {
    case 0:	/* ARM register from memory */
      return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
    case 1:	/* memory from ARM register */
      return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
    case 2:	/* ARM register from ARM register */
      return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
    case 3:	/* ARM register from constant */
      {
	long bits;
	rtx ops[4];

	bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
			       <MODE>mode);
	ops[0] = operands[0];
	ops[1] = GEN_INT (bits);
	ops[2] = GEN_INT (bits & 0xff00);
	ops[3] = GEN_INT (bits & 0x00ff);

	if (arm_arch_thumb2)
	  output_asm_insn (\"movw%?\\t%0, %1\", ops);
	else
	  output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
	return \"\";
      }
    default:
      gcc_unreachable ();
    }
  "
  [(set_attr "conds" "unconditional")
   (set_attr "type" "load_4,store_4,mov_reg,multiple")
   (set_attr "length" "4,4,4,8")
   (set_attr "predicable" "yes")]
)

;; SFmode move expander.  Stores need a register source; with the
;; literal pool disabled, constants that VFP cannot encode are loaded
;; through a GPR with MOV/MOVT via no_literal_pool_sf_immediate.
(define_expand "movsf"
  [(set (match_operand:SF 0 "general_operand")
	(match_operand:SF 1 "general_operand"))]
  "TARGET_EITHER"
  "
  gcc_checking_assert (aligned_operand (operands[0], SFmode));
  gcc_checking_assert (aligned_operand (operands[1], SFmode));
  if (TARGET_32BIT)
    {
      if (MEM_P (operands[0]))
	operands[1] = force_reg (SFmode, operands[1]);
    }
  else /* TARGET_THUMB1 */
    {
      if (can_create_pseudo_p ())
	{
	  if (!REG_P (operands[0]))
	    operands[1] = force_reg (SFmode, operands[1]);
	}
    }

  /* Cannot load it directly, generate a load with clobber so that it can be
     loaded via GPR with MOV / MOVT.  */
  if (arm_disable_literal_pool
      && (REG_P (operands[0]) || SUBREG_P (operands[0]))
      && CONST_DOUBLE_P (operands[1])
      && TARGET_VFP_BASE
      && !vfp3_const_double_rtx (operands[1]))
    {
      rtx clobreg = gen_reg_rtx (SFmode);
      emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
						   clobreg));
      DONE;
    }
  "
)

;; Transform a floating-point move of a constant into a core register into
;; an SImode operation.
;; After reload, rewrite "core-reg = SF constant" as the equivalent
;; SImode set of its bit pattern; FAIL if either lowpart cannot be taken.
(define_split
  [(set (match_operand:SF 0 "arm_general_register_operand" "")
	(match_operand:SF 1 "immediate_operand" ""))]
  "TARGET_EITHER
   && reload_completed
   && CONST_DOUBLE_P (operands[1])"
  [(set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_lowpart (SImode, operands[0]);
  operands[3] = gen_lowpart (SImode, operands[1]);
  if (operands[2] == 0 || operands[3] == 0)
    FAIL;
  "
)

;; Soft-float SFmode moves through core registers.  With the literal
;; pool disabled a non-memory constant load is emitted as \"#\" so the
;; splitter below expands it to MOV/MOVT.
(define_insn "*arm_movsf_soft_insn"
  [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
	(match_operand:SF 1 "general_operand"  "r,mE,r"))]
  "TARGET_32BIT
   && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
   && (!MEM_P (operands[0])
       || register_operand (operands[1], SFmode))"
{
  switch (which_alternative)
    {
    case 0: return \"mov%?\\t%0, %1\";
    case 1:
      /* Cannot load it directly, split to load it via MOV / MOVT.  */
      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
	return \"#\";
      return \"ldr%?\\t%0, %1\\t%@ float\";
    case 2: return \"str%?\\t%1, %0\\t%@ float\";
    default: gcc_unreachable ();
    }
}
  [(set_attr "predicable" "yes")
   (set_attr "type" "mov_reg,load_4,store_4")
   (set_attr "arm_pool_range" "*,4096,*")
   (set_attr "thumb2_pool_range" "*,4094,*")
   (set_attr "arm_neg_pool_range" "*,4084,*")
   (set_attr "thumb2_neg_pool_range" "*,0,*")]
)

;; Splitter for the above.
;; With the literal pool disabled, expand an SF constant load into a
;; plain SImode move of the constant's target bit pattern.
(define_split
  [(set (match_operand:SF 0 "s_register_operand")
	(match_operand:SF 1 "const_double_operand"))]
  "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
  [(const_int 0)]
{
  long buf;
  real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
  rtx cst = gen_int_mode (buf, SImode);
  emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
  DONE;
}
)

;; DFmode move expander.  Mirrors movsf: stores need a register source,
;; and with the literal pool disabled, constants not encodable by VFP
;; (vmov immediate or double-precision forms) are loaded through GPRs
;; with MOV/MOVT via no_literal_pool_df_immediate.
(define_expand "movdf"
  [(set (match_operand:DF 0 "general_operand")
	(match_operand:DF 1 "general_operand"))]
  "TARGET_EITHER"
  "
  gcc_checking_assert (aligned_operand (operands[0], DFmode));
  gcc_checking_assert (aligned_operand (operands[1], DFmode));
  if (TARGET_32BIT)
    {
      if (MEM_P (operands[0]))
	operands[1] = force_reg (DFmode, operands[1]);
    }
  else /* TARGET_THUMB */
    {
      if (can_create_pseudo_p ())
	{
	  if (!REG_P (operands[0]))
	    operands[1] = force_reg (DFmode, operands[1]);
	}
    }

  /* Cannot load it directly, generate a load with clobber so that it can be
     loaded via GPR with MOV / MOVT.  */
  if (arm_disable_literal_pool
      && (REG_P (operands[0]) || SUBREG_P (operands[0]))
      && CONSTANT_P (operands[1])
      && TARGET_VFP_BASE
      && !arm_const_double_rtx (operands[1])
      && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
    {
      rtx clobreg = gen_reg_rtx (DFmode);
      emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
						   clobreg));
      DONE;
    }
  "
)

;; Reloading a df mode value stored in integer regs to memory can require a
;; scratch reg.
;; Another reload_out<m> pattern that requires special constraints.
;; Store a DFmode value held in core registers to a memory operand that
;; may not be directly addressable.  Depending on the address form the
;; scratch (operand 2) holds the resolved base address; POST_INC/PRE_DEC
;; are re-emitted as a DImode move instead.
(define_expand "reload_outdf"
  [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
   (match_operand:DF 1 "s_register_operand" "r")
   (match_operand:SI 2 "s_register_operand" "=&r")]
  "TARGET_THUMB2"
  "
  {
    enum rtx_code code = GET_CODE (XEXP (operands[0], 0));

    if (code == REG)
      operands[2] = XEXP (operands[0], 0);
    else if (code == POST_INC || code == PRE_DEC)
      {
	operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
	operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
	emit_insn (gen_movdi (operands[0], operands[1]));
	DONE;
      }
    else if (code == PRE_INC)
      {
	rtx reg = XEXP (XEXP (operands[0], 0), 0);

	emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
	operands[2] = reg;
      }
    else if (code == POST_DEC)
      operands[2] = XEXP (XEXP (operands[0], 0), 0);
    else
      emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
			     XEXP (XEXP (operands[0], 0), 1)));

    emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
			    operands[1]));

    if (code == POST_DEC)
      emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));

    DONE;
  }"
)

;; Soft-float DFmode moves.  Register-to-register forms and literal-pool
;; -disabled constant loads are emitted as \"#\" and split later; the
;; remaining cases are expanded by output_move_double.
(define_insn "*movdf_soft_insn"
  [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
	(match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
  "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
   && (   register_operand (operands[0], DFmode)
       || register_operand (operands[1], DFmode))"
  "*
  switch (which_alternative)
    {
    case 0:
    case 1:
    case 2:
      return \"#\";
    case 3:
      /* Cannot load it directly, split to load it via MOV / MOVT.  */
      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
	return \"#\";
      /* Fall through.  */
    default:
      return output_move_double (operands, true, NULL);
    }
  "
  [(set_attr "length" "8,12,16,8,8")
   (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
   (set_attr "arm_pool_range" "*,*,*,1020,*")
   (set_attr "thumb2_pool_range" "*,*,*,1018,*")
   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
)

;; Splitter for the above.
;; With the literal pool disabled, expand a DF constant load into a
;; DImode move of the constant's 64-bit target bit pattern, assembled
;; in the word order given by BYTES_BIG_ENDIAN.
(define_split
  [(set (match_operand:DF 0 "s_register_operand")
	(match_operand:DF 1 "const_double_operand"))]
  "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
  [(const_int 0)]
{
  long buf[2];
  int order = BYTES_BIG_ENDIAN ? 1 : 0;
  real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
  unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
  ival |= (zext_hwi (buf[1 - order], 32) << 32);
  rtx cst = gen_int_mode (ival, DImode);
  emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
  DONE;
}
)


;; load- and store-multiple insns
;; The arm can load/store any set of registers, provided that they are in
;; ascending order, but these expanders assume a contiguous set.

(define_expand "load_multiple"
  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
                          (match_operand:SI 1 "" ""))
                     (use (match_operand:SI 2 "" ""))])]
  "TARGET_32BIT"
{
  HOST_WIDE_INT offset = 0;

  /* Support only fixed point registers.  */
  if (!CONST_INT_P (operands[2])
      || INTVAL (operands[2]) > MAX_LDM_STM_OPS
      || INTVAL (operands[2]) < 2
      || !MEM_P (operands[1])
      || !REG_P (operands[0])
      || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
      || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
    FAIL;

  operands[3]
    = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
			     INTVAL (operands[2]),
			     force_reg (SImode, XEXP (operands[1], 0)),
			     FALSE, operands[1], &offset);
})

(define_expand "store_multiple"
  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
                          (match_operand:SI 1 "" ""))
                     (use (match_operand:SI 2 "" ""))])]
  "TARGET_32BIT"
{
  HOST_WIDE_INT offset = 0;

  /* Support only fixed point registers.  */
  if (!CONST_INT_P (operands[2])
      || INTVAL (operands[2]) > MAX_LDM_STM_OPS
      || INTVAL (operands[2]) < 2
      || !REG_P (operands[1])
      || !MEM_P (operands[0])
      || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
      || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
    FAIL;

  operands[3]
    = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
			      INTVAL (operands[2]),
			      force_reg (SImode, XEXP (operands[0], 0)),
			      FALSE, operands[0], &offset);
})


;; memset expander: operands are destination, length, fill value and
;; alignment.  Delegates entirely to arm_gen_setmem.
(define_expand "setmemsi"
  [(match_operand:BLK 0 "general_operand")
   (match_operand:SI 1 "const_int_operand")
   (match_operand:SI 2 "const_int_operand")
   (match_operand:SI 3 "const_int_operand")]
  "TARGET_32BIT"
{
  if (arm_gen_setmem (operands))
    DONE;

  FAIL;
})


;; Move a block of memory if it is word aligned and MORE than 2 words long.
;; We could let this apply for blocks of less than this, but it clobbers so
;; many registers that there is then probably a better way.
;; Block-copy expander: operands are destination, source, length and
;; alignment.  32-bit targets try an ldrd/strd sequence first when the
;; tuning prefers it and we are not optimising for size; Thumb-1 only
;; handles word-aligned copies of at most 48 bytes.
(define_expand "cpymemqi"
  [(match_operand:BLK 0 "general_operand")
   (match_operand:BLK 1 "general_operand")
   (match_operand:SI 2 "const_int_operand")
   (match_operand:SI 3 "const_int_operand")]
  ""
  "
  if (TARGET_32BIT)
    {
      if (TARGET_LDRD && current_tune->prefer_ldrd_strd
          && !optimize_function_for_size_p (cfun))
        {
          if (gen_cpymem_ldrd_strd (operands))
            DONE;
          FAIL;
        }

      if (arm_gen_cpymemqi (operands))
        DONE;
      FAIL;
    }
  else /* TARGET_THUMB1 */
    {
      if (   INTVAL (operands[3]) != 4
          || INTVAL (operands[2]) > 48)
        FAIL;

      thumb_expand_cpymemqi (operands);
      DONE;
    }
  "
)


;; Compare & branch insns
;; The range calculations are based as follows:
;; For forward branches, the address calculation returns the address of
;; the next instruction.  This is 2 beyond the branch instruction.
;; For backward branches, the address calculation returns the address of
;; the first instruction in this pattern (cmp).  This is 2 before the branch
;; instruction for the shortest sequence, and 4 before the branch instruction
;; if we have to jump around an unconditional branch.
;; To the basic branch range the PC offset must be added (this is +4).
;; So for forward branches we have
;;   (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
;; And for backward branches we have
;;   (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
;;
;; In 16-bit Thumb these ranges are:
;; For a 'b'       pos_range = 2046, neg_range = -2048 giving (-2040->2048).
;; For a 'b<cond>' pos_range = 254,  neg_range = -256  giving (-250 ->256).

;; In 32-bit Thumb these ranges are:
;; For a 'b'       +/- 16MB is not checked for.
;; For a 'b<cond>' pos_range = 1048574, neg_range = -1048576 giving
;; (-1048568 -> 1048576).
;; SImode compare-and-branch expander.  ARM/Thumb-2 validate the
;; comparison and emit cbranch_cc; Thumb-1 keeps the comparison and
;; only legitimises operand 2 (negated-immediate form or register).
(define_expand "cbranchsi4"
  [(set (pc) (if_then_else
	      (match_operator 0 "expandable_comparison_operator"
	       [(match_operand:SI 1 "s_register_operand")
	        (match_operand:SI 2 "nonmemory_operand")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_EITHER"
  "
  if (!TARGET_THUMB1)
    {
      if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
        FAIL;
      emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
				      operands[3]));
      DONE;
    }
  if (thumb1_cmpneg_operand (operands[2], SImode))
    {
      emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
					      operands[3], operands[0]));
      DONE;
    }
  if (!thumb1_cmp_operand (operands[2], SImode))
    operands[2] = force_reg (SImode, operands[2]);
  ")

;; Single-precision FP compare-and-branch (hard-float only).
(define_expand "cbranchsf4"
  [(set (pc) (if_then_else
	      (match_operator 0 "expandable_comparison_operator"
	       [(match_operand:SF 1 "s_register_operand")
	        (match_operand:SF 2 "vfp_compare_operand")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_32BIT && TARGET_HARD_FLOAT"
  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
				   operands[3])); DONE;"
)

;; Double-precision FP compare-and-branch (needs double-precision VFP).
(define_expand "cbranchdf4"
  [(set (pc) (if_then_else
	      (match_operator 0 "expandable_comparison_operator"
	       [(match_operand:DF 1 "s_register_operand")
	        (match_operand:DF 2 "vfp_compare_operand")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
				   operands[3])); DONE;"
)

;; DImode compare-and-branch; the comparison must be validated (and may
;; be canonicalised) by arm_validize_comparison before emission.
(define_expand "cbranchdi4"
  [(set (pc) (if_then_else
	      (match_operator 0 "expandable_comparison_operator"
	       [(match_operand:DI 1 "s_register_operand")
	        (match_operand:DI 2 "reg_or_int_operand")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_32BIT"
  "{
     if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
       FAIL;
     emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
				       operands[3]));
     DONE;
   }"
)

;; Comparison and test insns

;; SImode compare: cmp for register/positive-immediate forms (with
;; narrow Thumb-2 encodings where possible), cmn for negatable
;; immediates ("L" constraint).
(define_insn "*arm_cmpsi_insn"
  [(set (reg:CC CC_REGNUM)
	(compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
		    (match_operand:SI 1 "arm_add_operand"    "Py,r,r,I,L")))]
  "TARGET_32BIT"
  "@
   cmp%?\\t%0, %1
   cmp%?\\t%0, %1
   cmp%?\\t%0, %1
   cmp%?\\t%0, %1
   cmn%?\\t%0, #%n1"
  [(set_attr "conds" "set")
   (set_attr "arch" "t2,t2,any,any,any")
   (set_attr "length" "2,2,4,4,4")
   (set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "yes,yes,yes,no,no")
   (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
)

;; Compare a register against a shifted register (shift by immediate,
;; or by register on A32 only).
(define_insn "*cmpsi_shiftsi"
  [(set (reg:CC CC_REGNUM)
	(compare:CC (match_operand:SI   0 "s_register_operand" "r,r")
		    (match_operator:SI  3 "shift_operator"
		     [(match_operand:SI 1 "s_register_operand" "r,r")
		      (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
  "TARGET_32BIT"
  "cmp\\t%0, %1%S3"
  [(set_attr "conds" "set")
   (set_attr "shift" "1")
   (set_attr "arch" "32,a")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; As above but with the operands swapped, producing a CC_SWP-mode
;; comparison so the condition is interpreted reversed.
(define_insn "*cmpsi_shiftsi_swp"
  [(set (reg:CC_SWP CC_REGNUM)
	(compare:CC_SWP (match_operator:SI 3 "shift_operator"
			 [(match_operand:SI 1 "s_register_operand" "r,r")
			  (match_operand:SI 2 "shift_amount_operand" "M,r")])
			(match_operand:SI 0 "s_register_operand" "r,r")))]
  "TARGET_32BIT"
  "cmp%?\\t%0, %1%S3"
  [(set_attr "conds" "set")
   (set_attr "shift" "1")
   (set_attr "arch" "32,a")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; Compare against a negated shifted register, implemented with cmn
;; (only the Z flag result is valid, hence CC_Z mode).
(define_insn "*arm_cmpsi_negshiftsi_si"
  [(set (reg:CC_Z CC_REGNUM)
	(compare:CC_Z
	 (neg:SI (match_operator:SI 1 "shift_operator"
		    [(match_operand:SI 2 "s_register_operand" "r,r")
		     (match_operand:SI 3 "shift_amount_operand" "M,r")]))
	 (match_operand:SI 0 "s_register_operand" "r,r")))]
  "TARGET_32BIT"
  "cmn%?\\t%0, %2%S1"
  [(set_attr "conds" "set")
   (set_attr "arch" "32,a")
   (set_attr "shift" "2")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")
   (set_attr "predicable" "yes")]
)

; This insn allows redundant compares to be removed by cse, nothing should
; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
; is deleted later on. The match_dup will match the mode here, so that
; mode changes of the condition codes aren't lost by this even though we don't
; specify what they are.

(define_insn "*deleted_compare"
  [(set (match_operand 0 "cc_register" "") (match_dup 0))]
  "TARGET_32BIT"
  "\\t%@ deleted compare"
  [(set_attr "conds" "set")
   (set_attr "length" "0")
   (set_attr "type" "no_insn")]
)


;; Conditional branch insns

;; Emit the comparison into the CC register and rewrite the branch to
;; test CC against zero; the actual jump pattern is matched below.
(define_expand "cbranch_cc"
  [(set (pc)
	(if_then_else (match_operator 0 "" [(match_operand 1 "" "")
					    (match_operand 2 "" "")])
		      (label_ref (match_operand 3 "" ""))
		      (pc)))]
  "TARGET_32BIT"
  "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
				      operands[1], operands[2], NULL_RTX);
   operands[2] = const0_rtx;"
)

;;
;; Patterns to match conditional branch insns.
;;

;; Conditional branch on the CC register.  The length attribute picks
;; the 16-bit or 32-bit Thumb-2 encoding by distance, or falls back to
;; an inverted short branch around an unconditional branch when the
;; target is out of conditional range.
(define_insn "arm_cond_branch"
  [(set (pc)
	(if_then_else (match_operator 1 "arm_comparison_operator"
		       [(match_operand 2 "cc_register" "") (const_int 0)])
		      (label_ref (match_operand 0 "" ""))
		      (pc)))]
  "TARGET_32BIT"
  {
    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
      {
	arm_ccfsm_state += 2;
	return "";
      }
    switch (get_attr_length (insn))
      {
	case 2: /* Thumb2 16-bit b{cond}.  */
	case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}.  */
	  return "b%d1\t%l0";
	  break;

	/* Thumb2 b{cond} out of range.  Use 16-bit b{cond} and
	   unconditional branch b.  */
	default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
      }
  }
  [(set_attr "conds" "use")
   (set_attr "type" "branch")
   (set (attr "length")
    (if_then_else (match_test "!TARGET_THUMB2")

      ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
      (const_int 4)

      ;; Check if target is within 16-bit Thumb2 b{cond} range.
      (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
		    (le (minus (match_dup 0) (pc)) (const_int 256)))

	;; Target is Thumb2, within narrow range.
	;; Generate b{cond}.
	(const_int 2)

	;; Check if target is within 32-bit Thumb2 b{cond} range.
	(if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
		      (le (minus (match_dup 0) (pc)) (const_int 1048576)))

	  ;; Target is Thumb2, within wide range.
	  ;; Generate b{cond}
	  (const_int 4)
	  ;; Target is Thumb2, out of range.
	  ;; Generate narrow b{cond} and unconditional branch b.
	  (const_int 6)))))]
)

;; As arm_cond_branch, but the branch is taken when the condition is
;; FALSE (label in the else arm), so the condition code is inverted.
(define_insn "*arm_cond_branch_reversed"
  [(set (pc)
	(if_then_else (match_operator 1 "arm_comparison_operator"
		       [(match_operand 2 "cc_register" "") (const_int 0)])
		      (pc)
		      (label_ref (match_operand 0 "" ""))))]
  "TARGET_32BIT"
  {
    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
      {
	arm_ccfsm_state += 2;
	return "";
      }
    switch (get_attr_length (insn))
      {
	case 2: /* Thumb2 16-bit b{cond}.  */
	case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}.  */
	  return "b%D1\t%l0";
	  break;

	/* Thumb2 b{cond} out of range.  Use 16-bit b{cond} and
	   unconditional branch b.  */
	default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
      }
  }
  [(set_attr "conds" "use")
   (set_attr "type" "branch")
   (set (attr "length")
    (if_then_else (match_test "!TARGET_THUMB2")

      ;;Target is not Thumb2, therefore is A32. Generate b{cond}.
      (const_int 4)

      ;; Check if target is within 16-bit Thumb2 b{cond} range.
      (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
		    (le (minus (match_dup 0) (pc)) (const_int 256)))

	;; Target is Thumb2, within narrow range.
	;; Generate b{cond}.
	(const_int 2)

	;; Check if target is within 32-bit Thumb2 b{cond} range.
	(if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
		      (le (minus (match_dup 0) (pc)) (const_int 1048576)))

	  ;; Target is Thumb2, within wide range.
	  ;; Generate b{cond}.
	  (const_int 4)
	  ;; Target is Thumb2, out of range.
	  ;; Generate narrow b{cond} and unconditional branch b.
7961 (const_int 6)))))] 7962) 7963 7964 7965 7966; scc insns 7967 7968(define_expand "cstore_cc" 7969 [(set (match_operand:SI 0 "s_register_operand") 7970 (match_operator:SI 1 "" [(match_operand 2 "" "") 7971 (match_operand 3 "" "")]))] 7972 "TARGET_32BIT" 7973 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]), 7974 operands[2], operands[3], NULL_RTX); 7975 operands[3] = const0_rtx;" 7976) 7977 7978(define_insn_and_split "*mov_scc" 7979 [(set (match_operand:SI 0 "s_register_operand" "=r") 7980 (match_operator:SI 1 "arm_comparison_operator_mode" 7981 [(match_operand 2 "cc_register" "") (const_int 0)]))] 7982 "TARGET_ARM" 7983 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1" 7984 "TARGET_ARM" 7985 [(set (match_dup 0) 7986 (if_then_else:SI (match_dup 1) 7987 (const_int 1) 7988 (const_int 0)))] 7989 "" 7990 [(set_attr "conds" "use") 7991 (set_attr "length" "8") 7992 (set_attr "type" "multiple")] 7993) 7994 7995(define_insn "*negscc_borrow" 7996 [(set (match_operand:SI 0 "s_register_operand" "=r") 7997 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))] 7998 "TARGET_32BIT" 7999 "sbc\\t%0, %0, %0" 8000 [(set_attr "conds" "use") 8001 (set_attr "length" "4") 8002 (set_attr "type" "adc_reg")] 8003) 8004 8005(define_insn_and_split "*mov_negscc" 8006 [(set (match_operand:SI 0 "s_register_operand" "=r") 8007 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode" 8008 [(match_operand 2 "cc_register" "") (const_int 0)])))] 8009 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)" 8010 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0" 8011 "&& true" 8012 [(set (match_dup 0) 8013 (if_then_else:SI (match_dup 1) 8014 (match_dup 3) 8015 (const_int 0)))] 8016 { 8017 operands[3] = GEN_INT (~0); 8018 } 8019 [(set_attr "conds" "use") 8020 (set_attr "length" "8") 8021 (set_attr "type" "multiple")] 8022) 8023 8024(define_insn_and_split "*mov_notscc" 8025 [(set (match_operand:SI 0 "s_register_operand" "=r") 8026 (not:SI (match_operator:SI 1 "arm_comparison_operator" 8027 
[(match_operand 2 "cc_register" "") (const_int 0)])))] 8028 "TARGET_ARM" 8029 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1" 8030 "TARGET_ARM" 8031 [(set (match_dup 0) 8032 (if_then_else:SI (match_dup 1) 8033 (match_dup 3) 8034 (match_dup 4)))] 8035 { 8036 operands[3] = GEN_INT (~1); 8037 operands[4] = GEN_INT (~0); 8038 } 8039 [(set_attr "conds" "use") 8040 (set_attr "length" "8") 8041 (set_attr "type" "multiple")] 8042) 8043 8044(define_expand "cstoresi4" 8045 [(set (match_operand:SI 0 "s_register_operand") 8046 (match_operator:SI 1 "expandable_comparison_operator" 8047 [(match_operand:SI 2 "s_register_operand") 8048 (match_operand:SI 3 "reg_or_int_operand")]))] 8049 "TARGET_32BIT || TARGET_THUMB1" 8050 "{ 8051 rtx op3, scratch, scratch2; 8052 8053 if (!TARGET_THUMB1) 8054 { 8055 if (!arm_add_operand (operands[3], SImode)) 8056 operands[3] = force_reg (SImode, operands[3]); 8057 emit_insn (gen_cstore_cc (operands[0], operands[1], 8058 operands[2], operands[3])); 8059 DONE; 8060 } 8061 8062 if (operands[3] == const0_rtx) 8063 { 8064 switch (GET_CODE (operands[1])) 8065 { 8066 case EQ: 8067 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2])); 8068 break; 8069 8070 case NE: 8071 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2])); 8072 break; 8073 8074 case LE: 8075 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx, 8076 NULL_RTX, 0, OPTAB_WIDEN); 8077 scratch = expand_binop (SImode, ior_optab, operands[2], scratch, 8078 NULL_RTX, 0, OPTAB_WIDEN); 8079 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), 8080 operands[0], 1, OPTAB_WIDEN); 8081 break; 8082 8083 case GE: 8084 scratch = expand_unop (SImode, one_cmpl_optab, operands[2], 8085 NULL_RTX, 1); 8086 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), 8087 NULL_RTX, 1, OPTAB_WIDEN); 8088 break; 8089 8090 case GT: 8091 scratch = expand_binop (SImode, ashr_optab, operands[2], 8092 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN); 8093 scratch = expand_binop (SImode, sub_optab, 
scratch, operands[2], 8094 NULL_RTX, 0, OPTAB_WIDEN); 8095 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0], 8096 0, OPTAB_WIDEN); 8097 break; 8098 8099 /* LT is handled by generic code. No need for unsigned with 0. */ 8100 default: 8101 FAIL; 8102 } 8103 DONE; 8104 } 8105 8106 switch (GET_CODE (operands[1])) 8107 { 8108 case EQ: 8109 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3], 8110 NULL_RTX, 0, OPTAB_WIDEN); 8111 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch)); 8112 break; 8113 8114 case NE: 8115 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3], 8116 NULL_RTX, 0, OPTAB_WIDEN); 8117 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch)); 8118 break; 8119 8120 case LE: 8121 op3 = force_reg (SImode, operands[3]); 8122 8123 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31), 8124 NULL_RTX, 1, OPTAB_WIDEN); 8125 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31), 8126 NULL_RTX, 0, OPTAB_WIDEN); 8127 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2, 8128 op3, operands[2])); 8129 break; 8130 8131 case GE: 8132 op3 = operands[3]; 8133 if (!thumb1_cmp_operand (op3, SImode)) 8134 op3 = force_reg (SImode, op3); 8135 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31), 8136 NULL_RTX, 0, OPTAB_WIDEN); 8137 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31), 8138 NULL_RTX, 1, OPTAB_WIDEN); 8139 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2, 8140 operands[2], op3)); 8141 break; 8142 8143 case LEU: 8144 op3 = force_reg (SImode, operands[3]); 8145 scratch = force_reg (SImode, const0_rtx); 8146 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch, 8147 op3, operands[2])); 8148 break; 8149 8150 case GEU: 8151 op3 = operands[3]; 8152 if (!thumb1_cmp_operand (op3, SImode)) 8153 op3 = force_reg (SImode, op3); 8154 scratch = force_reg (SImode, const0_rtx); 8155 emit_insn 
(gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch, 8156 operands[2], op3)); 8157 break; 8158 8159 case LTU: 8160 op3 = operands[3]; 8161 if (!thumb1_cmp_operand (op3, SImode)) 8162 op3 = force_reg (SImode, op3); 8163 scratch = gen_reg_rtx (SImode); 8164 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3)); 8165 break; 8166 8167 case GTU: 8168 op3 = force_reg (SImode, operands[3]); 8169 scratch = gen_reg_rtx (SImode); 8170 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2])); 8171 break; 8172 8173 /* No good sequences for GT, LT. */ 8174 default: 8175 FAIL; 8176 } 8177 DONE; 8178}") 8179 8180(define_expand "cstorehf4" 8181 [(set (match_operand:SI 0 "s_register_operand") 8182 (match_operator:SI 1 "expandable_comparison_operator" 8183 [(match_operand:HF 2 "s_register_operand") 8184 (match_operand:HF 3 "vfp_compare_operand")]))] 8185 "TARGET_VFP_FP16INST" 8186 { 8187 if (!arm_validize_comparison (&operands[1], 8188 &operands[2], 8189 &operands[3])) 8190 FAIL; 8191 8192 emit_insn (gen_cstore_cc (operands[0], operands[1], 8193 operands[2], operands[3])); 8194 DONE; 8195 } 8196) 8197 8198(define_expand "cstoresf4" 8199 [(set (match_operand:SI 0 "s_register_operand") 8200 (match_operator:SI 1 "expandable_comparison_operator" 8201 [(match_operand:SF 2 "s_register_operand") 8202 (match_operand:SF 3 "vfp_compare_operand")]))] 8203 "TARGET_32BIT && TARGET_HARD_FLOAT" 8204 "emit_insn (gen_cstore_cc (operands[0], operands[1], 8205 operands[2], operands[3])); DONE;" 8206) 8207 8208(define_expand "cstoredf4" 8209 [(set (match_operand:SI 0 "s_register_operand") 8210 (match_operator:SI 1 "expandable_comparison_operator" 8211 [(match_operand:DF 2 "s_register_operand") 8212 (match_operand:DF 3 "vfp_compare_operand")]))] 8213 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE" 8214 "emit_insn (gen_cstore_cc (operands[0], operands[1], 8215 operands[2], operands[3])); DONE;" 8216) 8217 8218(define_expand "cstoredi4" 8219 [(set (match_operand:SI 
0 "s_register_operand") 8220 (match_operator:SI 1 "expandable_comparison_operator" 8221 [(match_operand:DI 2 "s_register_operand") 8222 (match_operand:DI 3 "reg_or_int_operand")]))] 8223 "TARGET_32BIT" 8224 "{ 8225 if (!arm_validize_comparison (&operands[1], 8226 &operands[2], 8227 &operands[3])) 8228 FAIL; 8229 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2], 8230 operands[3])); 8231 DONE; 8232 }" 8233) 8234 8235 8236;; Conditional move insns 8237 8238(define_expand "movsicc" 8239 [(set (match_operand:SI 0 "s_register_operand") 8240 (if_then_else:SI (match_operand 1 "expandable_comparison_operator") 8241 (match_operand:SI 2 "arm_not_operand") 8242 (match_operand:SI 3 "arm_not_operand")))] 8243 "TARGET_32BIT" 8244 " 8245 { 8246 enum rtx_code code; 8247 rtx ccreg; 8248 8249 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0), 8250 &XEXP (operands[1], 1))) 8251 FAIL; 8252 8253 code = GET_CODE (operands[1]); 8254 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0), 8255 XEXP (operands[1], 1), NULL_RTX); 8256 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx); 8257 }" 8258) 8259 8260(define_expand "movhfcc" 8261 [(set (match_operand:HF 0 "s_register_operand") 8262 (if_then_else:HF (match_operand 1 "arm_cond_move_operator") 8263 (match_operand:HF 2 "s_register_operand") 8264 (match_operand:HF 3 "s_register_operand")))] 8265 "TARGET_VFP_FP16INST" 8266 " 8267 { 8268 enum rtx_code code = GET_CODE (operands[1]); 8269 rtx ccreg; 8270 8271 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0), 8272 &XEXP (operands[1], 1))) 8273 FAIL; 8274 8275 code = GET_CODE (operands[1]); 8276 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0), 8277 XEXP (operands[1], 1), NULL_RTX); 8278 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx); 8279 }" 8280) 8281 8282(define_expand "movsfcc" 8283 [(set (match_operand:SF 0 "s_register_operand") 8284 (if_then_else:SF (match_operand 1 "arm_cond_move_operator") 8285 
(match_operand:SF 2 "s_register_operand") 8286 (match_operand:SF 3 "s_register_operand")))] 8287 "TARGET_32BIT && TARGET_HARD_FLOAT" 8288 " 8289 { 8290 enum rtx_code code = GET_CODE (operands[1]); 8291 rtx ccreg; 8292 8293 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0), 8294 &XEXP (operands[1], 1))) 8295 FAIL; 8296 8297 code = GET_CODE (operands[1]); 8298 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0), 8299 XEXP (operands[1], 1), NULL_RTX); 8300 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx); 8301 }" 8302) 8303 8304(define_expand "movdfcc" 8305 [(set (match_operand:DF 0 "s_register_operand") 8306 (if_then_else:DF (match_operand 1 "arm_cond_move_operator") 8307 (match_operand:DF 2 "s_register_operand") 8308 (match_operand:DF 3 "s_register_operand")))] 8309 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE" 8310 " 8311 { 8312 enum rtx_code code = GET_CODE (operands[1]); 8313 rtx ccreg; 8314 8315 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0), 8316 &XEXP (operands[1], 1))) 8317 FAIL; 8318 code = GET_CODE (operands[1]); 8319 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0), 8320 XEXP (operands[1], 1), NULL_RTX); 8321 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx); 8322 }" 8323) 8324 8325(define_insn "*cmov<mode>" 8326 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>") 8327 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator" 8328 [(match_operand 2 "cc_register" "") (const_int 0)]) 8329 (match_operand:SDF 3 "s_register_operand" 8330 "<F_constraint>") 8331 (match_operand:SDF 4 "s_register_operand" 8332 "<F_constraint>")))] 8333 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>" 8334 "* 8335 { 8336 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]); 8337 switch (code) 8338 { 8339 case ARM_GE: 8340 case ARM_GT: 8341 case ARM_EQ: 8342 case ARM_VS: 8343 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\"; 8344 case 
ARM_LT: 8345 case ARM_LE: 8346 case ARM_NE: 8347 case ARM_VC: 8348 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\"; 8349 default: 8350 gcc_unreachable (); 8351 } 8352 return \"\"; 8353 }" 8354 [(set_attr "conds" "use") 8355 (set_attr "type" "fcsel")] 8356) 8357 8358(define_insn "*cmovhf" 8359 [(set (match_operand:HF 0 "s_register_operand" "=t") 8360 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator" 8361 [(match_operand 2 "cc_register" "") (const_int 0)]) 8362 (match_operand:HF 3 "s_register_operand" "t") 8363 (match_operand:HF 4 "s_register_operand" "t")))] 8364 "TARGET_VFP_FP16INST" 8365 "* 8366 { 8367 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]); 8368 switch (code) 8369 { 8370 case ARM_GE: 8371 case ARM_GT: 8372 case ARM_EQ: 8373 case ARM_VS: 8374 return \"vsel%d1.f16\\t%0, %3, %4\"; 8375 case ARM_LT: 8376 case ARM_LE: 8377 case ARM_NE: 8378 case ARM_VC: 8379 return \"vsel%D1.f16\\t%0, %4, %3\"; 8380 default: 8381 gcc_unreachable (); 8382 } 8383 return \"\"; 8384 }" 8385 [(set_attr "conds" "use") 8386 (set_attr "type" "fcsel")] 8387) 8388 8389(define_insn_and_split "*movsicc_insn" 8390 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r") 8391 (if_then_else:SI 8392 (match_operator 3 "arm_comparison_operator" 8393 [(match_operand 4 "cc_register" "") (const_int 0)]) 8394 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K") 8395 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))] 8396 "TARGET_ARM" 8397 "@ 8398 mov%D3\\t%0, %2 8399 mvn%D3\\t%0, #%B2 8400 mov%d3\\t%0, %1 8401 mvn%d3\\t%0, #%B1 8402 # 8403 # 8404 # 8405 #" 8406 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2 8407 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2 8408 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2 8409 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2" 8410 "&& reload_completed" 8411 [(const_int 0)] 8412 { 8413 enum rtx_code rev_code; 8414 machine_mode mode; 8415 rtx rev_cond; 8416 8417 emit_insn (gen_rtx_COND_EXEC 
(VOIDmode, 8418 operands[3], 8419 gen_rtx_SET (operands[0], operands[1]))); 8420 8421 rev_code = GET_CODE (operands[3]); 8422 mode = GET_MODE (operands[4]); 8423 if (mode == CCFPmode || mode == CCFPEmode) 8424 rev_code = reverse_condition_maybe_unordered (rev_code); 8425 else 8426 rev_code = reverse_condition (rev_code); 8427 8428 rev_cond = gen_rtx_fmt_ee (rev_code, 8429 VOIDmode, 8430 operands[4], 8431 const0_rtx); 8432 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 8433 rev_cond, 8434 gen_rtx_SET (operands[0], operands[2]))); 8435 DONE; 8436 } 8437 [(set_attr "length" "4,4,4,4,8,8,8,8") 8438 (set_attr "conds" "use") 8439 (set_attr_alternative "type" 8440 [(if_then_else (match_operand 2 "const_int_operand" "") 8441 (const_string "mov_imm") 8442 (const_string "mov_reg")) 8443 (const_string "mvn_imm") 8444 (if_then_else (match_operand 1 "const_int_operand" "") 8445 (const_string "mov_imm") 8446 (const_string "mov_reg")) 8447 (const_string "mvn_imm") 8448 (const_string "multiple") 8449 (const_string "multiple") 8450 (const_string "multiple") 8451 (const_string "multiple")])] 8452) 8453 8454(define_insn "*movsfcc_soft_insn" 8455 [(set (match_operand:SF 0 "s_register_operand" "=r,r") 8456 (if_then_else:SF (match_operator 3 "arm_comparison_operator" 8457 [(match_operand 4 "cc_register" "") (const_int 0)]) 8458 (match_operand:SF 1 "s_register_operand" "0,r") 8459 (match_operand:SF 2 "s_register_operand" "r,0")))] 8460 "TARGET_ARM && TARGET_SOFT_FLOAT" 8461 "@ 8462 mov%D3\\t%0, %2 8463 mov%d3\\t%0, %1" 8464 [(set_attr "conds" "use") 8465 (set_attr "type" "mov_reg")] 8466) 8467 8468 8469;; Jump and linkage insns 8470 8471(define_expand "jump" 8472 [(set (pc) 8473 (label_ref (match_operand 0 "" "")))] 8474 "TARGET_EITHER" 8475 "" 8476) 8477 8478(define_insn "*arm_jump" 8479 [(set (pc) 8480 (label_ref (match_operand 0 "" "")))] 8481 "TARGET_32BIT" 8482 "* 8483 { 8484 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2) 8485 { 8486 arm_ccfsm_state += 2; 8487 return \"\"; 8488 } 8489 
return \"b%?\\t%l0\"; 8490 } 8491 " 8492 [(set_attr "predicable" "yes") 8493 (set (attr "length") 8494 (if_then_else 8495 (and (match_test "TARGET_THUMB2") 8496 (and (ge (minus (match_dup 0) (pc)) (const_int -2044)) 8497 (le (minus (match_dup 0) (pc)) (const_int 2048)))) 8498 (const_int 2) 8499 (const_int 4))) 8500 (set_attr "type" "branch")] 8501) 8502 8503(define_expand "call" 8504 [(parallel [(call (match_operand 0 "memory_operand") 8505 (match_operand 1 "general_operand")) 8506 (use (match_operand 2 "" "")) 8507 (clobber (reg:SI LR_REGNUM))])] 8508 "TARGET_EITHER" 8509 " 8510 { 8511 rtx callee, pat; 8512 tree addr = MEM_EXPR (operands[0]); 8513 8514 /* In an untyped call, we can get NULL for operand 2. */ 8515 if (operands[2] == NULL_RTX) 8516 operands[2] = const0_rtx; 8517 8518 /* Decide if we should generate indirect calls by loading the 8519 32-bit address of the callee into a register before performing the 8520 branch and link. */ 8521 callee = XEXP (operands[0], 0); 8522 if (GET_CODE (callee) == SYMBOL_REF 8523 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee)) 8524 : !REG_P (callee)) 8525 XEXP (operands[0], 0) = force_reg (Pmode, callee); 8526 8527 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0))) 8528 /* Indirect call: set r9 with FDPIC value of callee. */ 8529 XEXP (operands[0], 0) 8530 = arm_load_function_descriptor (XEXP (operands[0], 0)); 8531 8532 if (detect_cmse_nonsecure_call (addr)) 8533 { 8534 pat = gen_nonsecure_call_internal (operands[0], operands[1], 8535 operands[2]); 8536 emit_call_insn (pat); 8537 } 8538 else 8539 { 8540 pat = gen_call_internal (operands[0], operands[1], operands[2]); 8541 arm_emit_call_insn (pat, XEXP (operands[0], 0), false); 8542 } 8543 8544 /* Restore FDPIC register (r9) after call. 
*/ 8545 if (TARGET_FDPIC) 8546 { 8547 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM); 8548 rtx initial_fdpic_reg 8549 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM); 8550 8551 emit_insn (gen_restore_pic_register_after_call (fdpic_reg, 8552 initial_fdpic_reg)); 8553 } 8554 8555 DONE; 8556 }" 8557) 8558 8559(define_insn "restore_pic_register_after_call" 8560 [(set (match_operand:SI 0 "s_register_operand" "+r,r") 8561 (unspec:SI [(match_dup 0) 8562 (match_operand:SI 1 "nonimmediate_operand" "r,m")] 8563 UNSPEC_PIC_RESTORE))] 8564 "" 8565 "@ 8566 mov\t%0, %1 8567 ldr\t%0, %1" 8568) 8569 8570(define_expand "call_internal" 8571 [(parallel [(call (match_operand 0 "memory_operand") 8572 (match_operand 1 "general_operand")) 8573 (use (match_operand 2 "" "")) 8574 (clobber (reg:SI LR_REGNUM))])]) 8575 8576(define_expand "nonsecure_call_internal" 8577 [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")] 8578 UNSPEC_NONSECURE_MEM) 8579 (match_operand 1 "general_operand")) 8580 (use (match_operand 2 "" "")) 8581 (clobber (reg:SI LR_REGNUM))])] 8582 "use_cmse" 8583 { 8584 rtx addr = XEXP (operands[0], 0); 8585 rtx tmp = REG_P (addr) ? 
addr : force_reg (SImode, addr); 8586 8587 if (!TARGET_HAVE_FPCXT_CMSE) 8588 { 8589 rtx r4 = gen_rtx_REG (SImode, R4_REGNUM); 8590 emit_move_insn (r4, tmp); 8591 tmp = r4; 8592 } 8593 8594 if (tmp != addr) 8595 operands[0] = replace_equiv_address (operands[0], tmp); 8596 } 8597) 8598 8599(define_insn "*call_reg_armv5" 8600 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r")) 8601 (match_operand 1 "" "")) 8602 (use (match_operand 2 "" "")) 8603 (clobber (reg:SI LR_REGNUM))] 8604 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)" 8605 "blx%?\\t%0" 8606 [(set_attr "type" "call")] 8607) 8608 8609(define_insn "*call_reg_arm" 8610 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r")) 8611 (match_operand 1 "" "")) 8612 (use (match_operand 2 "" "")) 8613 (clobber (reg:SI LR_REGNUM))] 8614 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)" 8615 "* 8616 return output_call (operands); 8617 " 8618 ;; length is worst case, normally it is only two 8619 [(set_attr "length" "12") 8620 (set_attr "type" "call")] 8621) 8622 8623 8624(define_expand "call_value" 8625 [(parallel [(set (match_operand 0 "" "") 8626 (call (match_operand 1 "memory_operand") 8627 (match_operand 2 "general_operand"))) 8628 (use (match_operand 3 "" "")) 8629 (clobber (reg:SI LR_REGNUM))])] 8630 "TARGET_EITHER" 8631 " 8632 { 8633 rtx pat, callee; 8634 tree addr = MEM_EXPR (operands[1]); 8635 8636 /* In an untyped call, we can get NULL for operand 2. */ 8637 if (operands[3] == 0) 8638 operands[3] = const0_rtx; 8639 8640 /* Decide if we should generate indirect calls by loading the 8641 32-bit address of the callee into a register before performing the 8642 branch and link. */ 8643 callee = XEXP (operands[1], 0); 8644 if (GET_CODE (callee) == SYMBOL_REF 8645 ? 
arm_is_long_call_p (SYMBOL_REF_DECL (callee)) 8646 : !REG_P (callee)) 8647 XEXP (operands[1], 0) = force_reg (Pmode, callee); 8648 8649 if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0))) 8650 /* Indirect call: set r9 with FDPIC value of callee. */ 8651 XEXP (operands[1], 0) 8652 = arm_load_function_descriptor (XEXP (operands[1], 0)); 8653 8654 if (detect_cmse_nonsecure_call (addr)) 8655 { 8656 pat = gen_nonsecure_call_value_internal (operands[0], operands[1], 8657 operands[2], operands[3]); 8658 emit_call_insn (pat); 8659 } 8660 else 8661 { 8662 pat = gen_call_value_internal (operands[0], operands[1], 8663 operands[2], operands[3]); 8664 arm_emit_call_insn (pat, XEXP (operands[1], 0), false); 8665 } 8666 8667 /* Restore FDPIC register (r9) after call. */ 8668 if (TARGET_FDPIC) 8669 { 8670 rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM); 8671 rtx initial_fdpic_reg 8672 = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM); 8673 8674 emit_insn (gen_restore_pic_register_after_call (fdpic_reg, 8675 initial_fdpic_reg)); 8676 } 8677 8678 DONE; 8679 }" 8680) 8681 8682(define_expand "call_value_internal" 8683 [(parallel [(set (match_operand 0 "" "") 8684 (call (match_operand 1 "memory_operand") 8685 (match_operand 2 "general_operand"))) 8686 (use (match_operand 3 "" "")) 8687 (clobber (reg:SI LR_REGNUM))])]) 8688 8689(define_expand "nonsecure_call_value_internal" 8690 [(parallel [(set (match_operand 0 "" "") 8691 (call (unspec:SI [(match_operand 1 "memory_operand")] 8692 UNSPEC_NONSECURE_MEM) 8693 (match_operand 2 "general_operand"))) 8694 (use (match_operand 3 "" "")) 8695 (clobber (reg:SI LR_REGNUM))])] 8696 "use_cmse" 8697 " 8698 { 8699 if (!TARGET_HAVE_FPCXT_CMSE) 8700 { 8701 rtx tmp = 8702 copy_to_suggested_reg (XEXP (operands[1], 0), 8703 gen_rtx_REG (SImode, R4_REGNUM), 8704 SImode); 8705 8706 operands[1] = replace_equiv_address (operands[1], tmp); 8707 } 8708 }") 8709 8710(define_insn "*call_value_reg_armv5" 8711 [(set (match_operand 0 "" "") 8712 (call 
(mem:SI (match_operand:SI 1 "s_register_operand" "r")) 8713 (match_operand 2 "" ""))) 8714 (use (match_operand 3 "" "")) 8715 (clobber (reg:SI LR_REGNUM))] 8716 "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)" 8717 "blx%?\\t%1" 8718 [(set_attr "type" "call")] 8719) 8720 8721(define_insn "*call_value_reg_arm" 8722 [(set (match_operand 0 "" "") 8723 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r")) 8724 (match_operand 2 "" ""))) 8725 (use (match_operand 3 "" "")) 8726 (clobber (reg:SI LR_REGNUM))] 8727 "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)" 8728 "* 8729 return output_call (&operands[1]); 8730 " 8731 [(set_attr "length" "12") 8732 (set_attr "type" "call")] 8733) 8734 8735;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses 8736;; The 'a' causes the operand to be treated as an address, i.e. no '#' output. 8737 8738(define_insn "*call_symbol" 8739 [(call (mem:SI (match_operand:SI 0 "" "")) 8740 (match_operand 1 "" "")) 8741 (use (match_operand 2 "" "")) 8742 (clobber (reg:SI LR_REGNUM))] 8743 "TARGET_32BIT 8744 && !SIBLING_CALL_P (insn) 8745 && (GET_CODE (operands[0]) == SYMBOL_REF) 8746 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))" 8747 "* 8748 { 8749 rtx op = operands[0]; 8750 8751 /* Switch mode now when possible. */ 8752 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op)) 8753 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op))) 8754 return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\"; 8755 8756 return NEED_PLT_RELOC ? 
\"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\"; 8757 }" 8758 [(set_attr "type" "call")] 8759) 8760 8761(define_insn "*call_value_symbol" 8762 [(set (match_operand 0 "" "") 8763 (call (mem:SI (match_operand:SI 1 "" "")) 8764 (match_operand:SI 2 "" ""))) 8765 (use (match_operand 3 "" "")) 8766 (clobber (reg:SI LR_REGNUM))] 8767 "TARGET_32BIT 8768 && !SIBLING_CALL_P (insn) 8769 && (GET_CODE (operands[1]) == SYMBOL_REF) 8770 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))" 8771 "* 8772 { 8773 rtx op = operands[1]; 8774 8775 /* Switch mode now when possible. */ 8776 if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op)) 8777 && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op))) 8778 return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\"; 8779 8780 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\"; 8781 }" 8782 [(set_attr "type" "call")] 8783) 8784 8785(define_expand "sibcall_internal" 8786 [(parallel [(call (match_operand 0 "memory_operand") 8787 (match_operand 1 "general_operand")) 8788 (return) 8789 (use (match_operand 2 "" ""))])]) 8790 8791;; We may also be able to do sibcalls for Thumb, but it's much harder... 
8792(define_expand "sibcall" 8793 [(parallel [(call (match_operand 0 "memory_operand") 8794 (match_operand 1 "general_operand")) 8795 (return) 8796 (use (match_operand 2 "" ""))])] 8797 "TARGET_32BIT" 8798 " 8799 { 8800 rtx pat; 8801 8802 if ((!REG_P (XEXP (operands[0], 0)) 8803 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF) 8804 || (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF 8805 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0))))) 8806 XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0)); 8807 8808 if (operands[2] == NULL_RTX) 8809 operands[2] = const0_rtx; 8810 8811 pat = gen_sibcall_internal (operands[0], operands[1], operands[2]); 8812 arm_emit_call_insn (pat, operands[0], true); 8813 DONE; 8814 }" 8815) 8816 8817(define_expand "sibcall_value_internal" 8818 [(parallel [(set (match_operand 0 "" "") 8819 (call (match_operand 1 "memory_operand") 8820 (match_operand 2 "general_operand"))) 8821 (return) 8822 (use (match_operand 3 "" ""))])]) 8823 8824(define_expand "sibcall_value" 8825 [(parallel [(set (match_operand 0 "" "") 8826 (call (match_operand 1 "memory_operand") 8827 (match_operand 2 "general_operand"))) 8828 (return) 8829 (use (match_operand 3 "" ""))])] 8830 "TARGET_32BIT" 8831 " 8832 { 8833 rtx pat; 8834 8835 if ((!REG_P (XEXP (operands[1], 0)) 8836 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF) 8837 || (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF 8838 && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0))))) 8839 XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0)); 8840 8841 if (operands[3] == NULL_RTX) 8842 operands[3] = const0_rtx; 8843 8844 pat = gen_sibcall_value_internal (operands[0], operands[1], 8845 operands[2], operands[3]); 8846 arm_emit_call_insn (pat, operands[1], true); 8847 DONE; 8848 }" 8849) 8850 8851(define_insn "*sibcall_insn" 8852 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US")) 8853 (match_operand 1 "" "")) 8854 (return) 8855 (use (match_operand 2 "" 
""))] 8856 "TARGET_32BIT && SIBLING_CALL_P (insn)" 8857 "* 8858 if (which_alternative == 1) 8859 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\"; 8860 else 8861 { 8862 if (arm_arch5t || arm_arch4t) 8863 return \"bx%?\\t%0\\t%@ indirect register sibling call\"; 8864 else 8865 return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\"; 8866 } 8867 " 8868 [(set_attr "type" "call")] 8869) 8870 8871(define_insn "*sibcall_value_insn" 8872 [(set (match_operand 0 "" "") 8873 (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US")) 8874 (match_operand 2 "" ""))) 8875 (return) 8876 (use (match_operand 3 "" ""))] 8877 "TARGET_32BIT && SIBLING_CALL_P (insn)" 8878 "* 8879 if (which_alternative == 1) 8880 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\"; 8881 else 8882 { 8883 if (arm_arch5t || arm_arch4t) 8884 return \"bx%?\\t%1\"; 8885 else 8886 return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \"; 8887 } 8888 " 8889 [(set_attr "type" "call")] 8890) 8891 8892(define_expand "<return_str>return" 8893 [(RETURNS)] 8894 "(TARGET_ARM || (TARGET_THUMB2 8895 && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL 8896 && !IS_STACKALIGN (arm_current_func_type ()))) 8897 <return_cond_false>" 8898 " 8899 { 8900 if (TARGET_THUMB2) 8901 { 8902 thumb2_expand_return (<return_simple_p>); 8903 DONE; 8904 } 8905 } 8906 " 8907) 8908 8909;; Often the return insn will be the same as loading from memory, so set attr 8910(define_insn "*arm_return" 8911 [(return)] 8912 "TARGET_ARM && USE_RETURN_INSN (FALSE)" 8913 "* 8914 { 8915 if (arm_ccfsm_state == 2) 8916 { 8917 arm_ccfsm_state += 2; 8918 return \"\"; 8919 } 8920 return output_return_instruction (const_true_rtx, true, false, false); 8921 }" 8922 [(set_attr "type" "load_4") 8923 (set_attr "length" "12") 8924 (set_attr "predicable" "yes")] 8925) 8926 8927(define_insn "*cond_<return_str>return" 8928 [(set (pc) 8929 (if_then_else (match_operator 0 "arm_comparison_operator" 8930 [(match_operand 1 "cc_register" "") 
(const_int 0)]) 8931 (RETURNS) 8932 (pc)))] 8933 "TARGET_ARM <return_cond_true>" 8934 "* 8935 { 8936 if (arm_ccfsm_state == 2) 8937 { 8938 arm_ccfsm_state += 2; 8939 return \"\"; 8940 } 8941 return output_return_instruction (operands[0], true, false, 8942 <return_simple_p>); 8943 }" 8944 [(set_attr "conds" "use") 8945 (set_attr "length" "12") 8946 (set_attr "type" "load_4")] 8947) 8948 8949(define_insn "*cond_<return_str>return_inverted" 8950 [(set (pc) 8951 (if_then_else (match_operator 0 "arm_comparison_operator" 8952 [(match_operand 1 "cc_register" "") (const_int 0)]) 8953 (pc) 8954 (RETURNS)))] 8955 "TARGET_ARM <return_cond_true>" 8956 "* 8957 { 8958 if (arm_ccfsm_state == 2) 8959 { 8960 arm_ccfsm_state += 2; 8961 return \"\"; 8962 } 8963 return output_return_instruction (operands[0], true, true, 8964 <return_simple_p>); 8965 }" 8966 [(set_attr "conds" "use") 8967 (set_attr "length" "12") 8968 (set_attr "type" "load_4")] 8969) 8970 8971(define_insn "*arm_simple_return" 8972 [(simple_return)] 8973 "TARGET_ARM" 8974 "* 8975 { 8976 if (arm_ccfsm_state == 2) 8977 { 8978 arm_ccfsm_state += 2; 8979 return \"\"; 8980 } 8981 return output_return_instruction (const_true_rtx, true, false, true); 8982 }" 8983 [(set_attr "type" "branch") 8984 (set_attr "length" "4") 8985 (set_attr "predicable" "yes")] 8986) 8987 8988;; Generate a sequence of instructions to determine if the processor is 8989;; in 26-bit or 32-bit mode, and return the appropriate return address 8990;; mask. 
;; Compare against UNSPEC_CHECK_ARCH to find out whether we are in
;; 26-bit or 32-bit mode, then select either an all-ones mask or
;; 0x03fffffc (the 26-bit return-address mask) accordingly.
(define_expand "return_addr_mask"
  [(set (match_dup 1)
	(compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
		       (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand")
	(if_then_else:SI (eq (match_dup 1) (const_int 0))
			 (const_int -1)
			 (const_int 67108860)))] ; 0x03fffffc
  "TARGET_ARM"
  "
  operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
  ")

;; The architecture check itself: a pair of TEQ instructions whose
;; flag result distinguishes the execution mode.
(define_insn "*check_arch2"
  [(set (match_operand:CC_NZ 0 "cc_register" "")
	(compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
		       (const_int 0)))]
  "TARGET_ARM"
  "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
  [(set_attr "length" "8")
   (set_attr "conds" "set")
   (set_attr "type" "multiple")]
)

;; Call subroutine returning any type.

;; Operand 0 is the function to call, operand 1 the block of memory in
;; which to store the possible return values, operand 2 a parallel
;; describing the result registers.  After the call every result
;; register is stored into the memory block at its accumulated offset.
(define_expand "untyped_call"
  [(parallel [(call (match_operand 0 "" "")
		    (const_int 0))
	      (match_operand 1 "" "")
	      (match_operand 2 "" "")])]
  "TARGET_EITHER && !TARGET_FDPIC"
  "
  {
    int i;
    rtx par = gen_rtx_PARALLEL (VOIDmode,
				rtvec_alloc (XVECLEN (operands[2], 0)));
    rtx addr = gen_reg_rtx (Pmode);
    rtx mem;
    int size = 0;

    emit_move_insn (addr, XEXP (operands[1], 0));
    mem = change_address (operands[1], BLKmode, addr);

    for (i = 0; i < XVECLEN (operands[2], 0); i++)
      {
	rtx src = SET_SRC (XVECEXP (operands[2], 0, i));

	/* Default code only uses r0 as a return value, but we could
	   be using anything up to 4 registers.  */
	if (REGNO (src) == R0_REGNUM)
	  src = gen_rtx_REG (TImode, R0_REGNUM);

	XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
						 GEN_INT (size));
	size += GET_MODE_SIZE (GET_MODE (src));
      }

    emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));

    size = 0;

    /* Copy each result register out to the memory block.  */
    for (i = 0; i < XVECLEN (par, 0); i++)
      {
	HOST_WIDE_INT offset = 0;
	rtx reg = XEXP (XVECEXP (par, 0, i), 0);

	if (size != 0)
	  emit_move_insn (addr, plus_constant (Pmode, addr, size));

	mem = change_address (mem, GET_MODE (reg), NULL);
	if (REGNO (reg) == R0_REGNUM)
	  {
	    /* On thumb we have to use a write-back instruction.  */
	    emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
	    size = TARGET_ARM ? 16 : 0;
	  }
	else
	  {
	    emit_move_insn (mem, reg);
	    size = GET_MODE_SIZE (GET_MODE (reg));
	  }
      }

    /* The optimizer does not know that the call sets the function value
       registers we stored in the result block.  We avoid problems by
       claiming that all hard registers are used and clobbered at this
       point.  */
    emit_insn (gen_blockage ());

    DONE;
  }"
)

;; Inverse of untyped_call: reload every possible result register from
;; the memory block (operand 0) and perform a naked return.
(define_expand "untyped_return"
  [(match_operand:BLK 0 "memory_operand")
   (match_operand 1 "" "")]
  "TARGET_EITHER && !TARGET_FDPIC"
  "
  {
    int i;
    rtx addr = gen_reg_rtx (Pmode);
    rtx mem;
    int size = 0;

    emit_move_insn (addr, XEXP (operands[0], 0));
    mem = change_address (operands[0], BLKmode, addr);

    for (i = 0; i < XVECLEN (operands[1], 0); i++)
      {
	HOST_WIDE_INT offset = 0;
	rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));

	if (size != 0)
	  emit_move_insn (addr, plus_constant (Pmode, addr, size));

	mem = change_address (mem, GET_MODE (reg), NULL);
	if (REGNO (reg) == R0_REGNUM)
	  {
	    /* On thumb we have to use a write-back instruction.  */
	    emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
	    size = TARGET_ARM ? 16 : 0;
	  }
	else
	  {
	    emit_move_insn (reg, mem);
	    size = GET_MODE_SIZE (GET_MODE (reg));
	  }
      }

    /* Emit USE insns before the return.  */
    for (i = 0; i < XVECLEN (operands[1], 0); i++)
      emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));

    /* Construct the return.  */
    expand_naked_return ();

    DONE;
  }"
)

;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
;; all of memory.  This blocks insns from being moved across this point.

(define_insn "blockage"
  [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
  "TARGET_EITHER"
  ""
  [(set_attr "length" "0")
   (set_attr "type" "block")]
)

;; Since we hard code r0 here use the 'o' constraint to prevent
;; provoking undefined behaviour in the hardware with putting out
;; auto-increment operations with potentially r0 as the base register.
;; Probe one stack address by storing r0 to it (see the r0/'o'
;; constraint note directly above).
(define_insn "probe_stack"
  [(set (match_operand:SI 0 "memory_operand" "=o")
	(unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
  "TARGET_32BIT"
  "str%?\\tr0, %0"
  [(set_attr "type" "store_4")
   (set_attr "predicable" "yes")]
)

;; Probe a whole range of stack addresses; the loop itself is emitted
;; by output_probe_stack_range.
(define_insn "probe_stack_range"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
			     (match_operand:SI 2 "register_operand" "r")]
			     VUNSPEC_PROBE_STACK_RANGE))]
  "TARGET_32BIT"
{
  return output_probe_stack_range (operands[0], operands[2]);
}
  [(set_attr "type" "multiple")
   (set_attr "conds" "clob")]
)

;; Named patterns for stack smashing protection.
;; This expand only builds the pattern; the real work happens in the
;; insn-and-split below after register allocation.
(define_expand "stack_protect_combined_set"
  [(parallel
     [(set (match_operand:SI 0 "memory_operand")
	   (unspec:SI [(match_operand:SI 1 "guard_operand")]
		      UNSPEC_SP_SET))
      (clobber (match_scratch:SI 2 ""))
      (clobber (match_scratch:SI 3 ""))])]
  ""
  ""
)

;; Use a separate insn from the above expand to be able to have the mem outside
;; the operand #1 when register allocation comes.  This is needed to avoid LRA
;; try to reload the guard since we need to control how PIC access is done in
;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
;; legitimize_pic_address ()).
(define_insn_and_split "*stack_protect_combined_set_insn"
  [(set (match_operand:SI 0 "memory_operand" "=m,m")
	(unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
		   UNSPEC_SP_SET))
   (clobber (match_scratch:SI 2 "=&l,&r"))
   (clobber (match_scratch:SI 3 "=&l,&r"))]
  ""
  "#"
  "reload_completed"
  [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
					    UNSPEC_SP_SET))
	      (clobber (match_dup 2))])]
  "
{
  /* After reload, materialise the guard's address into scratch
     operand 2, handling PIC/FDPIC access explicitly.  */
  if (flag_pic)
    {
      rtx pic_reg;

      if (TARGET_FDPIC)
	  pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
      else
	  pic_reg = operands[3];

      /* Forces recomputing of GOT base now.  */
      legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
			      true /*compute_now*/);
    }
  else
    {
      if (address_operand (operands[1], SImode))
	operands[2] = operands[1];
      else
	{
	  /* Fall back to loading the address from the constant pool.  */
	  rtx mem = force_const_mem (SImode, operands[1]);
	  if (!general_operand (mem, SImode))
	    {
	      emit_move_insn (operands[2], XEXP (mem, 0));
	      mem = replace_equiv_address (mem, operands[2], false);
	    }
	  emit_move_insn (operands[2], mem);
	}
    }
}"
  [(set_attr "arch" "t1,32")]
)

;; DO NOT SPLIT THIS INSN.  It's important for security reasons that the
;; canary value does not live beyond the life of this sequence.
;; Load the canary through the guard address in operand 1, store it to
;; the protected slot, then zero the register so the canary value does
;; not survive in a register (see the DO-NOT-SPLIT note above).
(define_insn "*stack_protect_set_insn"
  [(set (match_operand:SI 0 "memory_operand" "=m,m")
	(unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
	 UNSPEC_SP_SET))
   (clobber (match_dup 1))]
  ""
  "@
   ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
   ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
  [(set_attr "length" "8,12")
   (set_attr "conds" "clob,nocond")
   (set_attr "type" "multiple")
   (set_attr "arch" "t1,32")]
)

;; Combined canary test + conditional branch to operand 2; expanded
;; properly by the insn-and-split below after register allocation.
(define_expand "stack_protect_combined_test"
  [(parallel
     [(set (pc)
	   (if_then_else
		(eq (match_operand:SI 0 "memory_operand")
		    (unspec:SI [(match_operand:SI 1 "guard_operand")]
			       UNSPEC_SP_TEST))
		(label_ref (match_operand 2))
		(pc)))
      (clobber (match_scratch:SI 3 ""))
      (clobber (match_scratch:SI 4 ""))
      (clobber (reg:CC CC_REGNUM))])]
  ""
  ""
)

;; Use a separate insn from the above expand to be able to have the mem outside
;; the operand #1 when register allocation comes.  This is needed to avoid LRA
;; try to reload the guard since we need to control how PIC access is done in
;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
;; legitimize_pic_address ()).
(define_insn_and_split "*stack_protect_combined_test_insn"
  [(set (pc)
	(if_then_else
		(eq (match_operand:SI 0 "memory_operand" "m,m")
		    (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
			       UNSPEC_SP_TEST))
		(label_ref (match_operand 2))
		(pc)))
   (clobber (match_scratch:SI 3 "=&l,&r"))
   (clobber (match_scratch:SI 4 "=&l,&r"))
   (clobber (reg:CC CC_REGNUM))]
  ""
  "#"
  "reload_completed"
  [(const_int 0)]
{
  rtx eq;

  /* Materialise the guard address into operand 3, with explicit
     control over PIC/FDPIC access (mirrors the set pattern above).  */
  if (flag_pic)
    {
      rtx pic_reg;

      if (TARGET_FDPIC)
	  pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
      else
	  pic_reg = operands[4];

      /* Forces recomputing of GOT base now.  */
      legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
			      true /*compute_now*/);
    }
  else
    {
      if (address_operand (operands[1], SImode))
	operands[3] = operands[1];
      else
	{
	  rtx mem = force_const_mem (SImode, operands[1]);
	  if (!general_operand (mem, SImode))
	    {
	      emit_move_insn (operands[3], XEXP (mem, 0));
	      mem = replace_equiv_address (mem, operands[3], false);
	    }
	  emit_move_insn (operands[3], mem);
	}
    }
  /* Emit the actual compare-and-branch, using the flags on 32-bit
     targets and a plain register compare on Thumb-1.  */
  if (TARGET_32BIT)
    {
      emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
						  operands[3]));
      rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
      eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
      emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
    }
  else
    {
      emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
						     operands[3]));
      eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
      emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
				      operands[2]));
    }
  DONE;
}
  [(set_attr "arch" "t1,32")]
)

;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
;; canary value does not live beyond the end of this sequence.
9341(define_insn "arm_stack_protect_test_insn" 9342 [(set (reg:CC_Z CC_REGNUM) 9343 (compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m") 9344 (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))] 9345 UNSPEC_SP_TEST) 9346 (const_int 0))) 9347 (clobber (match_operand:SI 0 "register_operand" "=&l,&r")) 9348 (clobber (match_dup 2))] 9349 "TARGET_32BIT" 9350 "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0" 9351 [(set_attr "length" "12,16") 9352 (set_attr "conds" "set") 9353 (set_attr "type" "multiple") 9354 (set_attr "arch" "t,32")] 9355) 9356 9357(define_expand "casesi" 9358 [(match_operand:SI 0 "s_register_operand") ; index to jump on 9359 (match_operand:SI 1 "const_int_operand") ; lower bound 9360 (match_operand:SI 2 "const_int_operand") ; total range 9361 (match_operand:SI 3 "" "") ; table label 9362 (match_operand:SI 4 "" "")] ; Out of range label 9363 "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code" 9364 " 9365 { 9366 enum insn_code code; 9367 if (operands[1] != const0_rtx) 9368 { 9369 rtx reg = gen_reg_rtx (SImode); 9370 9371 emit_insn (gen_addsi3 (reg, operands[0], 9372 gen_int_mode (-INTVAL (operands[1]), 9373 SImode))); 9374 operands[0] = reg; 9375 } 9376 9377 if (TARGET_ARM) 9378 code = CODE_FOR_arm_casesi_internal; 9379 else if (TARGET_THUMB1) 9380 code = CODE_FOR_thumb1_casesi_internal_pic; 9381 else if (flag_pic) 9382 code = CODE_FOR_thumb2_casesi_internal_pic; 9383 else 9384 code = CODE_FOR_thumb2_casesi_internal; 9385 9386 if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode)) 9387 operands[2] = force_reg (SImode, operands[2]); 9388 9389 emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2], 9390 operands[3], operands[4])); 9391 DONE; 9392 }" 9393) 9394 9395;; The USE in this pattern is needed to tell flow analysis that this is 9396;; a CASESI insn. It has no other purpose. 
9397(define_expand "arm_casesi_internal" 9398 [(parallel [(set (pc) 9399 (if_then_else 9400 (leu (match_operand:SI 0 "s_register_operand") 9401 (match_operand:SI 1 "arm_rhs_operand")) 9402 (match_dup 4) 9403 (label_ref:SI (match_operand 3 "")))) 9404 (clobber (reg:CC CC_REGNUM)) 9405 (use (label_ref:SI (match_operand 2 "")))])] 9406 "TARGET_ARM" 9407{ 9408 operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4)); 9409 operands[4] = gen_rtx_PLUS (SImode, operands[4], 9410 gen_rtx_LABEL_REF (SImode, operands[2])); 9411 operands[4] = gen_rtx_MEM (SImode, operands[4]); 9412 MEM_READONLY_P (operands[4]) = 1; 9413 MEM_NOTRAP_P (operands[4]) = 1; 9414}) 9415 9416(define_insn "*arm_casesi_internal" 9417 [(parallel [(set (pc) 9418 (if_then_else 9419 (leu (match_operand:SI 0 "s_register_operand" "r") 9420 (match_operand:SI 1 "arm_rhs_operand" "rI")) 9421 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4)) 9422 (label_ref:SI (match_operand 2 "" "")))) 9423 (label_ref:SI (match_operand 3 "" "")))) 9424 (clobber (reg:CC CC_REGNUM)) 9425 (use (label_ref:SI (match_dup 2)))])] 9426 "TARGET_ARM" 9427 "* 9428 if (flag_pic) 9429 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\"; 9430 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\"; 9431 " 9432 [(set_attr "conds" "clob") 9433 (set_attr "length" "12") 9434 (set_attr "type" "multiple")] 9435) 9436 9437(define_expand "indirect_jump" 9438 [(set (pc) 9439 (match_operand:SI 0 "s_register_operand"))] 9440 "TARGET_EITHER" 9441 " 9442 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the 9443 address and use bx. */ 9444 if (TARGET_THUMB2) 9445 { 9446 rtx tmp; 9447 tmp = gen_reg_rtx (SImode); 9448 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1))); 9449 operands[0] = tmp; 9450 } 9451 " 9452) 9453 9454;; NB Never uses BX. 
;; ARM-state register-indirect jump via mov to pc (never BX, per the
;; note above).
(define_insn "*arm_indirect_jump"
  [(set (pc)
	(match_operand:SI 0 "s_register_operand" "r"))]
  "TARGET_ARM"
  "mov%?\\t%|pc, %0\\t%@ indirect register jump"
  [(set_attr "predicable" "yes")
   (set_attr "type" "branch")]
)

;; Indirect jump loading the target straight into pc from memory.
(define_insn "*load_indirect_jump"
  [(set (pc)
	(match_operand:SI 0 "memory_operand" "m"))]
  "TARGET_ARM"
  "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
  [(set_attr "type" "load_4")
   (set_attr "pool_range" "4096")
   (set_attr "neg_pool_range" "4084")
   (set_attr "predicable" "yes")]
)


;; Misc insns

;; No-op; 2 bytes in Thumb state, 4 in ARM state.
(define_insn "nop"
  [(const_int 0)]
  "TARGET_EITHER"
  "nop"
  [(set (attr "length")
	(if_then_else (eq_attr "is_thumb" "yes")
		      (const_int 2)
		      (const_int 4)))
   (set_attr "type" "mov_reg")]
)

;; Unconditional trap, emitted as a raw undefined-instruction encoding
;; via .inst (one encoding for ARM state, another for Thumb).
(define_insn "trap"
  [(trap_if (const_int 1) (const_int 0))]
  ""
  "*
  if (TARGET_ARM)
    return \".inst\\t0xe7f000f0\";
  else
    return \".inst\\t0xdeff\";
  "
  [(set (attr "length")
	(if_then_else (eq_attr "is_thumb" "yes")
		      (const_int 2)
		      (const_int 4)))
   (set_attr "type" "trap")
   (set_attr "conds" "unconditional")]
)


;; Patterns to allow combination of arithmetic, cond code and shifts

;; op Rd, Rn, Rm, lsl #log2(power-of-two) — multiply by a power of two
;; folded into the shifter operand.
(define_insn "*<arith_shift_insn>_multsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(SHIFTABLE_OPS:SI
	 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
		  (match_operand:SI 3 "power_of_two_operand" ""))
	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
  "TARGET_32BIT"
  "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
  [(set_attr "predicable" "yes")
   (set_attr "shift" "2")
   (set_attr "arch" "a,t2")
   (set_attr "autodetect_type" "alu_shift_mul_op3")])

;; op Rd, Rn, Rm, <shift> — general shifted-operand form; MULT is
;; excluded because the pattern above handles it.
(define_insn "*<arith_shift_insn>_shiftsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(SHIFTABLE_OPS:SI
	 (match_operator:SI 2 "shift_nomul_operator"
	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
	   (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
  "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
  "<arith_shift_insn>%?\\t%0, %1, %3%S2"
  [(set_attr "predicable" "yes")
   (set_attr "shift" "3")
   (set_attr "arch" "a,t2,a")
   (set_attr "autodetect_type" "alu_shift_operator2")])

;; Split a doubly-nested shiftable operation through the available
;; scratch register, leaving two single shiftable-op insns.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(match_operator:SI 1 "shiftable_operator"
	 [(match_operator:SI 2 "shiftable_operator"
	   [(match_operator:SI 3 "shift_operator"
	     [(match_operand:SI 4 "s_register_operand" "")
	      (match_operand:SI 5 "reg_or_int_operand" "")])
	    (match_operand:SI 6 "s_register_operand" "")])
	  (match_operand:SI 7 "arm_rhs_operand" "")]))
   (clobber (match_operand:SI 8 "s_register_operand" ""))]
  "TARGET_32BIT"
  [(set (match_dup 8)
	(match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
			 (match_dup 6)]))
   (set (match_dup 0)
	(match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
  "")

;; Flag-setting variant of the shifted-operand ALU op, keeping the
;; arithmetic result.
(define_insn "*arith_shiftsi_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (match_operator:SI 1 "shiftable_operator"
	  [(match_operator:SI 3 "shift_operator"
	    [(match_operand:SI 4 "s_register_operand" "r,r")
	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
	   (match_operand:SI 2 "s_register_operand" "r,r")])
	 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
			 (match_dup 2)]))]
  "TARGET_32BIT"
  "%i1s%?\\t%0, %2, %4%S3"
  [(set_attr "conds" "set")
   (set_attr "shift" "4")
   (set_attr "arch" "32,a")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; As above, but only the flags are wanted; the result goes to a
;; scratch register.
(define_insn "*arith_shiftsi_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (match_operator:SI 1 "shiftable_operator"
	  [(match_operator:SI 3 "shift_operator"
	    [(match_operand:SI 4 "s_register_operand" "r,r")
	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
	   (match_operand:SI 2 "s_register_operand" "r,r")])
	 (const_int 0)))
   (clobber (match_scratch:SI 0 "=r,r"))]
  "TARGET_32BIT"
  "%i1s%?\\t%0, %2, %4%S3"
  [(set_attr "conds" "set")
   (set_attr "shift" "4")
   (set_attr "arch" "32,a")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; sub Rd, Rn, Rm, <shift> — subtraction with a shifted subtrahend
;; (subtraction is not commutative, so it gets its own patterns).
(define_insn "*sub_shiftsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
		  (match_operator:SI 2 "shift_operator"
		   [(match_operand:SI 3 "s_register_operand" "r,r")
		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
  "TARGET_32BIT"
  "sub%?\\t%0, %1, %3%S2"
  [(set_attr "predicable" "yes")
   (set_attr "predicable_short_it" "no")
   (set_attr "shift" "3")
   (set_attr "arch" "32,a")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; Flag-setting variant keeping the difference.
(define_insn "*sub_shiftsi_compare0"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operator:SI 2 "shift_operator"
		    [(match_operand:SI 3 "s_register_operand" "r,r")
		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
	 (const_int 0)))
   (set (match_operand:SI 0 "s_register_operand" "=r,r")
	(minus:SI (match_dup 1)
		  (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
  "TARGET_32BIT"
  "subs%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "set")
   (set_attr "shift" "3")
   (set_attr "arch" "32,a")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")])

;; Flag-setting variant discarding the difference into a scratch.
(define_insn "*sub_shiftsi_compare0_scratch"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
		   (match_operator:SI 2 "shift_operator"
		    [(match_operand:SI 3 "s_register_operand" "r,r")
		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
	 (const_int 0)))
   (clobber (match_scratch:SI 0 "=r,r"))]
  "TARGET_32BIT"
  "subs%?\\t%0, %1, %3%S2"
  [(set_attr "conds" "set")
   (set_attr "shift" "3")
   (set_attr "arch" "32,a")
   (set_attr "type" "alus_shift_imm,alus_shift_reg")])


;; AND of a register with the 0/1 result of a comparison on an existing
;; CC register.  Split after reload into two conditionally-executed
;; moves selected by operands 4 (the condition) and 5 (its inverse).
(define_insn_and_split "*and_scc"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(and:SI (match_operator:SI 1 "arm_comparison_operator"
		 [(match_operand 2 "cc_register" "") (const_int 0)])
		(match_operand:SI 3 "s_register_operand" "r")))]
  "TARGET_ARM"
  "#"   ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
  "&& reload_completed"
  [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
   (cond_exec (match_dup 4) (set (match_dup 0)
				 (and:SI (match_dup 3) (const_int 1))))]
  {
    machine_mode mode = GET_MODE (operands[2]);
    enum rtx_code rc = GET_CODE (operands[1]);

    /* Note that operands[4] is the same as operands[1],
       but with VOIDmode as the result.  */
    operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
    /* FP compares need the unordered-aware reversal.  */
    if (mode == CCFPmode || mode == CCFPEmode)
      rc = reverse_condition_maybe_unordered (rc);
    else
      rc = reverse_condition (rc);
    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
  }
  [(set_attr "conds" "use")
   (set_attr "type" "multiple")
   (set_attr "length" "8")]
)

;; OR of a register with a comparison result.  When the destination
;; aliases operand 3 a single conditional ORR suffices; otherwise split
;; into two conditionally-executed insns as for *and_scc.
(define_insn_and_split "*ior_scc"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(ior:SI (match_operator:SI 1 "arm_comparison_operator"
		 [(match_operand 2 "cc_register" "") (const_int 0)])
		(match_operand:SI 3 "s_register_operand" "0,?r")))]
  "TARGET_ARM"
  "@
   orr%d1\\t%0, %3, #1
   #"
  "&& reload_completed
   && REGNO (operands [0]) != REGNO (operands[3])"
  ;; && which_alternative == 1
  ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
  [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
   (cond_exec (match_dup 4) (set (match_dup 0)
				 (ior:SI (match_dup 3) (const_int 1))))]
  {
    machine_mode mode = GET_MODE (operands[2]);
    enum rtx_code rc = GET_CODE (operands[1]);

    /* Note that operands[4] is the same as operands[1],
       but with VOIDmode as the result.  */
    operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
    if (mode == CCFPmode || mode == CCFPEmode)
      rc = reverse_condition_maybe_unordered (rc);
    else
      rc = reverse_condition (rc);
    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
  }
  [(set_attr "conds" "use")
   (set_attr "length" "4,8")
   (set_attr "type" "logic_imm,multiple")]
)

; A series of splitters for the compare_scc pattern below.  Note that
; order is important.
;; x < 0 is just the sign bit: a single logical shift right by 31.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(lt:SI (match_operand:SI 1 "s_register_operand" "")
	       (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT && reload_completed"
  [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])

;; x >= 0 is the inverted sign bit: mvn then shift right by 31.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(ge:SI (match_operand:SI 1 "s_register_operand" "")
	       (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT && reload_completed"
  [(set (match_dup 0) (not:SI (match_dup 1)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])

;; x == 0 on ARMv5t+: clz gives 32 only for zero, so clz; lsr #5
;; produces exactly 0 or 1 without touching the flags.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(eq:SI (match_operand:SI 1 "s_register_operand" "")
	       (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "arm_arch5t && TARGET_32BIT"
  [(set (match_dup 0) (clz:SI (match_dup 1)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
)

;; x == 0 fallback: rsbs 1 - x then conditionally clear on carry-lower.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(eq:SI (match_operand:SI 1 "s_register_operand" "")
	       (const_int 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT && reload_completed"
  [(parallel
    [(set (reg:CC CC_REGNUM)
	  (compare:CC (const_int 1) (match_dup 1)))
     (set (match_dup 0)
	  (minus:SI (const_int 1) (match_dup 1)))])
   (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_dup 0) (const_int 0)))])

;; x != const: subtract the constant (as an add of its negation) with
;; flags, then force the result to 1 whenever it was non-zero.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(ne:SI (match_operand:SI 1 "s_register_operand" "")
	       (match_operand:SI 2 "const_int_operand" "")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT && reload_completed"
  [(parallel
    [(set (reg:CC CC_REGNUM)
	  (compare:CC (match_dup 1) (match_dup 2)))
     (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
   (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_dup 0) (const_int 1)))]
{
  operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
})

;; x != y (register/add-operand case): flag-setting subtract, then set
;; the result to 1 when the difference was non-zero.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(ne:SI (match_operand:SI 1 "s_register_operand" "")
	       (match_operand:SI 2 "arm_add_operand" "")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT && reload_completed"
  [(parallel
    [(set (reg:CC_NZ CC_REGNUM)
	  (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
			 (const_int 0)))
     (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
   (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
	      (set (match_dup 0) (const_int 1)))])

;; Generic store-comparison-result: compare, then two conditional moves
;; of 0 and 1 chosen by operands 4 (inverse) and 5 (the condition).
;; The splitters above catch the cheaper special cases first.
(define_insn_and_split "*compare_scc"
  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
	(match_operator:SI 1 "arm_comparison_operator"
	 [(match_operand:SI 2 "s_register_operand" "r,r")
	  (match_operand:SI 3 "arm_add_operand" "rI,L")]))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT"
  "#"
  "&& reload_completed"
  [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
   (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
   (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
{
  rtx tmp1;
  machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
				      operands[2], operands[3]);
  enum rtx_code rc = GET_CODE (operands[1]);

  tmp1 = gen_rtx_REG (mode, CC_REGNUM);

  operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
  /* FP compares need the unordered-aware reversal.  */
  if (mode == CCFPmode || mode == CCFPEmode)
    rc = reverse_condition_maybe_unordered (rc);
  else
    rc = reverse_condition (rc);
  operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
}
  [(set_attr "type" "multiple")]
)

;; Attempt to improve the sequence generated by the compare_scc splitters
;; not to use conditional execution.

;; Rd = (eq (reg1) (const_int0)) // ARMv5
;; clz Rd, reg1
;; lsr Rd, Rd, #5
;; Replace compare + two cond_exec moves by the branch-free clz/lsr
;; idiom when the flags die after the sequence.
(define_peephole2
  [(set (reg:CC CC_REGNUM)
	(compare:CC (match_operand:SI 1 "register_operand" "")
		    (const_int 0)))
   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_dup 0) (const_int 1)))]
  "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
  [(set (match_dup 0) (clz:SI (match_dup 1)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
)

;; Rd = (eq (reg1) (const_int0)) // !ARMv5
;; negs Rd, reg1
;; adc  Rd, Rd, reg1
;; Pre-clz fallback using the carry out of a negation.
(define_peephole2
  [(set (reg:CC CC_REGNUM)
	(compare:CC (match_operand:SI 1 "register_operand" "")
		    (const_int 0)))
   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_dup 0) (const_int 1)))
   (match_scratch:SI 2 "r")]
  "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
  [(parallel
    [(set (reg:CC CC_REGNUM)
	  (compare:CC (const_int 0) (match_dup 1)))
     (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
   (set (match_dup 0)
	(plus:SI (plus:SI (match_dup 1) (match_dup 2))
		 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
)

;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
;; sub Rd, Reg1, reg2
;; clz Rd, Rd
;; lsr Rd, Rd, #5
(define_peephole2
  [(set (reg:CC CC_REGNUM)
	(compare:CC (match_operand:SI 1 "register_operand" "")
		    (match_operand:SI 2 "arm_rhs_operand" "")))
   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_dup 0) (const_int 1)))]
  "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
  && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
  [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (clz:SI (match_dup 0)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
)


;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
;; sub T1, Reg1, reg2
;; negs Rd, T1
;; adc  Rd, Rd, T1
(define_peephole2
  [(set (reg:CC CC_REGNUM)
	(compare:CC (match_operand:SI 1 "register_operand" "")
		    (match_operand:SI 2 "arm_rhs_operand" "")))
   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
	      (set (match_dup 0) (const_int 1)))
   (match_scratch:SI 3 "r")]
  "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
  [(set (match_dup 3) (match_dup 4))
   (parallel
    [(set (reg:CC CC_REGNUM)
	  (compare:CC (const_int 0) (match_dup 3)))
     (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
   (set (match_dup 0)
	(plus:SI (plus:SI (match_dup 0) (match_dup 3))
		 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
  "
  /* Operand 4 is reg1 - reg2, folded to a plus of the negated
     constant when reg2 is an immediate.  */
  if (CONST_INT_P (operands[2]))
    operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
  else
    operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
  ")

;; Conditional move selected by an (in)equality test on a comparison
;; already held in the CC register: emitted as one or two predicated
;; mov instructions depending on which operand ties to the output.
(define_insn "*cond_move"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(if_then_else:SI (match_operator 3 "equality_operator"
			  [(match_operator 4 "arm_comparison_operator"
			    [(match_operand 5 "cc_register" "") (const_int 0)])
			   (const_int 0)])
			 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
			 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
  "TARGET_ARM"
  "*
  if (GET_CODE (operands[3]) == NE)
    {
      if (which_alternative != 1)
	output_asm_insn (\"mov%D4\\t%0, %2\", operands);
      if (which_alternative != 0)
	output_asm_insn (\"mov%d4\\t%0, %1\", operands);
      return \"\";
    }
  if (which_alternative != 0)
    output_asm_insn (\"mov%D4\\t%0, %1\", operands);
  if (which_alternative != 1)
    output_asm_insn (\"mov%d4\\t%0, %2\", operands);
  return \"\";
  "
  [(set_attr "conds" "use")
   (set_attr_alternative "type"
			 [(if_then_else (match_operand 2 "const_int_operand" "")
					(const_string "mov_imm")
					(const_string "mov_reg"))
			  (if_then_else (match_operand 1 "const_int_operand" "")
					(const_string "mov_imm")
					(const_string "mov_reg"))
			  (const_string "multiple")])
   (set_attr "length" "4,4,8")]
)

;; Shiftable operation whose first input is the 0/1 result of a
;; comparison; special-cases x < 0 as a lsr #31 shifter operand.
(define_insn "*cond_arith"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(match_operator:SI 5 "shiftable_operator"
	 [(match_operator:SI 4 "arm_comparison_operator"
	   [(match_operand:SI 2 "s_register_operand" "r,r")
	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
	  (match_operand:SI 1 "s_register_operand" "0,?r")]))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "*
    if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
      return \"%i5\\t%0, %1, %2, lsr #31\";

    output_asm_insn (\"cmp\\t%2, %3\", operands);
    if (GET_CODE (operands[5]) == AND)
      output_asm_insn (\"mov%D4\\t%0, #0\", operands);
    else if (GET_CODE (operands[5]) == MINUS)
      output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
    else if (which_alternative != 0)
      output_asm_insn
(\"mov%D4\\t%0, %1\", operands); 9951 return \"%i5%d4\\t%0, %1, #1\"; 9952 " 9953 [(set_attr "conds" "clob") 9954 (set_attr "length" "12") 9955 (set_attr "type" "multiple")] 9956) 9957 9958(define_insn "*cond_sub" 9959 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 9960 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r") 9961 (match_operator:SI 4 "arm_comparison_operator" 9962 [(match_operand:SI 2 "s_register_operand" "r,r") 9963 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")]))) 9964 (clobber (reg:CC CC_REGNUM))] 9965 "TARGET_ARM" 9966 "* 9967 output_asm_insn (\"cmp\\t%2, %3\", operands); 9968 if (which_alternative != 0) 9969 output_asm_insn (\"mov%D4\\t%0, %1\", operands); 9970 return \"sub%d4\\t%0, %1, #1\"; 9971 " 9972 [(set_attr "conds" "clob") 9973 (set_attr "length" "8,12") 9974 (set_attr "type" "multiple")] 9975) 9976 9977(define_insn "*cmp_ite0" 9978 [(set (match_operand 6 "dominant_cc_register" "") 9979 (compare 9980 (if_then_else:SI 9981 (match_operator 4 "arm_comparison_operator" 9982 [(match_operand:SI 0 "s_register_operand" 9983 "l,l,l,r,r,r,r,r,r") 9984 (match_operand:SI 1 "arm_add_operand" 9985 "lPy,lPy,lPy,rI,L,rI,L,rI,L")]) 9986 (match_operator:SI 5 "arm_comparison_operator" 9987 [(match_operand:SI 2 "s_register_operand" 9988 "l,r,r,l,l,r,r,r,r") 9989 (match_operand:SI 3 "arm_add_operand" 9990 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]) 9991 (const_int 0)) 9992 (const_int 0)))] 9993 "TARGET_32BIT" 9994 "* 9995 { 9996 static const char * const cmp1[NUM_OF_COND_CMP][2] = 9997 { 9998 {\"cmp%d5\\t%0, %1\", 9999 \"cmp%d4\\t%2, %3\"}, 10000 {\"cmn%d5\\t%0, #%n1\", 10001 \"cmp%d4\\t%2, %3\"}, 10002 {\"cmp%d5\\t%0, %1\", 10003 \"cmn%d4\\t%2, #%n3\"}, 10004 {\"cmn%d5\\t%0, #%n1\", 10005 \"cmn%d4\\t%2, #%n3\"} 10006 }; 10007 static const char * const cmp2[NUM_OF_COND_CMP][2] = 10008 { 10009 {\"cmp\\t%2, %3\", 10010 \"cmp\\t%0, %1\"}, 10011 {\"cmp\\t%2, %3\", 10012 \"cmn\\t%0, #%n1\"}, 10013 {\"cmn\\t%2, #%n3\", 10014 \"cmp\\t%0, %1\"}, 10015 {\"cmn\\t%2, 
#%n3\", 10016 \"cmn\\t%0, #%n1\"} 10017 }; 10018 static const char * const ite[2] = 10019 { 10020 \"it\\t%d5\", 10021 \"it\\t%d4\" 10022 }; 10023 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN, 10024 CMP_CMP, CMN_CMP, CMP_CMP, 10025 CMN_CMP, CMP_CMN, CMN_CMN}; 10026 int swap = 10027 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4])); 10028 10029 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands); 10030 if (TARGET_THUMB2) { 10031 output_asm_insn (ite[swap], operands); 10032 } 10033 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands); 10034 return \"\"; 10035 }" 10036 [(set_attr "conds" "set") 10037 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any") 10038 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no") 10039 (set_attr "type" "multiple") 10040 (set_attr_alternative "length" 10041 [(const_int 6) 10042 (const_int 8) 10043 (const_int 8) 10044 (const_int 8) 10045 (const_int 8) 10046 (if_then_else (eq_attr "is_thumb" "no") 10047 (const_int 8) 10048 (const_int 10)) 10049 (if_then_else (eq_attr "is_thumb" "no") 10050 (const_int 8) 10051 (const_int 10)) 10052 (if_then_else (eq_attr "is_thumb" "no") 10053 (const_int 8) 10054 (const_int 10)) 10055 (if_then_else (eq_attr "is_thumb" "no") 10056 (const_int 8) 10057 (const_int 10))])] 10058) 10059 10060(define_insn "*cmp_ite1" 10061 [(set (match_operand 6 "dominant_cc_register" "") 10062 (compare 10063 (if_then_else:SI 10064 (match_operator 4 "arm_comparison_operator" 10065 [(match_operand:SI 0 "s_register_operand" 10066 "l,l,l,r,r,r,r,r,r") 10067 (match_operand:SI 1 "arm_add_operand" 10068 "lPy,lPy,lPy,rI,L,rI,L,rI,L")]) 10069 (match_operator:SI 5 "arm_comparison_operator" 10070 [(match_operand:SI 2 "s_register_operand" 10071 "l,r,r,l,l,r,r,r,r") 10072 (match_operand:SI 3 "arm_add_operand" 10073 "lPy,rI,L,lPy,lPy,rI,rI,L,L")]) 10074 (const_int 1)) 10075 (const_int 0)))] 10076 "TARGET_32BIT" 10077 "* 10078 { 10079 static const char * const 
cmp1[NUM_OF_COND_CMP][2] = 10080 { 10081 {\"cmp\\t%0, %1\", 10082 \"cmp\\t%2, %3\"}, 10083 {\"cmn\\t%0, #%n1\", 10084 \"cmp\\t%2, %3\"}, 10085 {\"cmp\\t%0, %1\", 10086 \"cmn\\t%2, #%n3\"}, 10087 {\"cmn\\t%0, #%n1\", 10088 \"cmn\\t%2, #%n3\"} 10089 }; 10090 static const char * const cmp2[NUM_OF_COND_CMP][2] = 10091 { 10092 {\"cmp%d4\\t%2, %3\", 10093 \"cmp%D5\\t%0, %1\"}, 10094 {\"cmp%d4\\t%2, %3\", 10095 \"cmn%D5\\t%0, #%n1\"}, 10096 {\"cmn%d4\\t%2, #%n3\", 10097 \"cmp%D5\\t%0, %1\"}, 10098 {\"cmn%d4\\t%2, #%n3\", 10099 \"cmn%D5\\t%0, #%n1\"} 10100 }; 10101 static const char * const ite[2] = 10102 { 10103 \"it\\t%d4\", 10104 \"it\\t%D5\" 10105 }; 10106 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN, 10107 CMP_CMP, CMN_CMP, CMP_CMP, 10108 CMN_CMP, CMP_CMN, CMN_CMN}; 10109 int swap = 10110 comparison_dominates_p (GET_CODE (operands[5]), 10111 reverse_condition (GET_CODE (operands[4]))); 10112 10113 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands); 10114 if (TARGET_THUMB2) { 10115 output_asm_insn (ite[swap], operands); 10116 } 10117 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands); 10118 return \"\"; 10119 }" 10120 [(set_attr "conds" "set") 10121 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any") 10122 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no") 10123 (set_attr_alternative "length" 10124 [(const_int 6) 10125 (const_int 8) 10126 (const_int 8) 10127 (const_int 8) 10128 (const_int 8) 10129 (if_then_else (eq_attr "is_thumb" "no") 10130 (const_int 8) 10131 (const_int 10)) 10132 (if_then_else (eq_attr "is_thumb" "no") 10133 (const_int 8) 10134 (const_int 10)) 10135 (if_then_else (eq_attr "is_thumb" "no") 10136 (const_int 8) 10137 (const_int 10)) 10138 (if_then_else (eq_attr "is_thumb" "no") 10139 (const_int 8) 10140 (const_int 10))]) 10141 (set_attr "type" "multiple")] 10142) 10143 10144(define_insn "*cmp_and" 10145 [(set (match_operand 6 "dominant_cc_register" "") 10146 (compare 10147 (and:SI 10148 
(match_operator 4 "arm_comparison_operator" 10149 [(match_operand:SI 0 "s_register_operand" 10150 "l,l,l,r,r,r,r,r,r,r") 10151 (match_operand:SI 1 "arm_add_operand" 10152 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")]) 10153 (match_operator:SI 5 "arm_comparison_operator" 10154 [(match_operand:SI 2 "s_register_operand" 10155 "l,r,r,l,l,r,r,r,r,r") 10156 (match_operand:SI 3 "arm_add_operand" 10157 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")])) 10158 (const_int 0)))] 10159 "TARGET_32BIT" 10160 "* 10161 { 10162 static const char *const cmp1[NUM_OF_COND_CMP][2] = 10163 { 10164 {\"cmp%d5\\t%0, %1\", 10165 \"cmp%d4\\t%2, %3\"}, 10166 {\"cmn%d5\\t%0, #%n1\", 10167 \"cmp%d4\\t%2, %3\"}, 10168 {\"cmp%d5\\t%0, %1\", 10169 \"cmn%d4\\t%2, #%n3\"}, 10170 {\"cmn%d5\\t%0, #%n1\", 10171 \"cmn%d4\\t%2, #%n3\"} 10172 }; 10173 static const char *const cmp2[NUM_OF_COND_CMP][2] = 10174 { 10175 {\"cmp\\t%2, %3\", 10176 \"cmp\\t%0, %1\"}, 10177 {\"cmp\\t%2, %3\", 10178 \"cmn\\t%0, #%n1\"}, 10179 {\"cmn\\t%2, #%n3\", 10180 \"cmp\\t%0, %1\"}, 10181 {\"cmn\\t%2, #%n3\", 10182 \"cmn\\t%0, #%n1\"} 10183 }; 10184 static const char *const ite[2] = 10185 { 10186 \"it\\t%d5\", 10187 \"it\\t%d4\" 10188 }; 10189 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN, 10190 CMP_CMP, CMN_CMP, CMP_CMP, 10191 CMP_CMP, CMN_CMP, CMP_CMN, 10192 CMN_CMN}; 10193 int swap = 10194 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4])); 10195 10196 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands); 10197 if (TARGET_THUMB2) { 10198 output_asm_insn (ite[swap], operands); 10199 } 10200 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands); 10201 return \"\"; 10202 }" 10203 [(set_attr "conds" "set") 10204 (set_attr "predicable" "no") 10205 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any") 10206 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no") 10207 (set_attr_alternative "length" 10208 [(const_int 6) 10209 (const_int 8) 10210 (const_int 8) 10211 (const_int 8) 10212 
(const_int 8) 10213 (const_int 6) 10214 (if_then_else (eq_attr "is_thumb" "no") 10215 (const_int 8) 10216 (const_int 10)) 10217 (if_then_else (eq_attr "is_thumb" "no") 10218 (const_int 8) 10219 (const_int 10)) 10220 (if_then_else (eq_attr "is_thumb" "no") 10221 (const_int 8) 10222 (const_int 10)) 10223 (if_then_else (eq_attr "is_thumb" "no") 10224 (const_int 8) 10225 (const_int 10))]) 10226 (set_attr "type" "multiple")] 10227) 10228 10229(define_insn "*cmp_ior" 10230 [(set (match_operand 6 "dominant_cc_register" "") 10231 (compare 10232 (ior:SI 10233 (match_operator 4 "arm_comparison_operator" 10234 [(match_operand:SI 0 "s_register_operand" 10235 "l,l,l,r,r,r,r,r,r,r") 10236 (match_operand:SI 1 "arm_add_operand" 10237 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")]) 10238 (match_operator:SI 5 "arm_comparison_operator" 10239 [(match_operand:SI 2 "s_register_operand" 10240 "l,r,r,l,l,r,r,r,r,r") 10241 (match_operand:SI 3 "arm_add_operand" 10242 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")])) 10243 (const_int 0)))] 10244 "TARGET_32BIT" 10245 "* 10246 { 10247 static const char *const cmp1[NUM_OF_COND_CMP][2] = 10248 { 10249 {\"cmp\\t%0, %1\", 10250 \"cmp\\t%2, %3\"}, 10251 {\"cmn\\t%0, #%n1\", 10252 \"cmp\\t%2, %3\"}, 10253 {\"cmp\\t%0, %1\", 10254 \"cmn\\t%2, #%n3\"}, 10255 {\"cmn\\t%0, #%n1\", 10256 \"cmn\\t%2, #%n3\"} 10257 }; 10258 static const char *const cmp2[NUM_OF_COND_CMP][2] = 10259 { 10260 {\"cmp%D4\\t%2, %3\", 10261 \"cmp%D5\\t%0, %1\"}, 10262 {\"cmp%D4\\t%2, %3\", 10263 \"cmn%D5\\t%0, #%n1\"}, 10264 {\"cmn%D4\\t%2, #%n3\", 10265 \"cmp%D5\\t%0, %1\"}, 10266 {\"cmn%D4\\t%2, #%n3\", 10267 \"cmn%D5\\t%0, #%n1\"} 10268 }; 10269 static const char *const ite[2] = 10270 { 10271 \"it\\t%D4\", 10272 \"it\\t%D5\" 10273 }; 10274 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN, 10275 CMP_CMP, CMN_CMP, CMP_CMP, 10276 CMP_CMP, CMN_CMP, CMP_CMN, 10277 CMN_CMN}; 10278 int swap = 10279 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4])); 10280 10281 output_asm_insn 
(cmp1[cmp_idx[which_alternative]][swap], operands); 10282 if (TARGET_THUMB2) { 10283 output_asm_insn (ite[swap], operands); 10284 } 10285 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands); 10286 return \"\"; 10287 } 10288 " 10289 [(set_attr "conds" "set") 10290 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any") 10291 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no") 10292 (set_attr_alternative "length" 10293 [(const_int 6) 10294 (const_int 8) 10295 (const_int 8) 10296 (const_int 8) 10297 (const_int 8) 10298 (const_int 6) 10299 (if_then_else (eq_attr "is_thumb" "no") 10300 (const_int 8) 10301 (const_int 10)) 10302 (if_then_else (eq_attr "is_thumb" "no") 10303 (const_int 8) 10304 (const_int 10)) 10305 (if_then_else (eq_attr "is_thumb" "no") 10306 (const_int 8) 10307 (const_int 10)) 10308 (if_then_else (eq_attr "is_thumb" "no") 10309 (const_int 8) 10310 (const_int 10))]) 10311 (set_attr "type" "multiple")] 10312) 10313 10314(define_insn_and_split "*ior_scc_scc" 10315 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts") 10316 (ior:SI (match_operator:SI 3 "arm_comparison_operator" 10317 [(match_operand:SI 1 "s_register_operand" "l,r") 10318 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")]) 10319 (match_operator:SI 6 "arm_comparison_operator" 10320 [(match_operand:SI 4 "s_register_operand" "l,r") 10321 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))) 10322 (clobber (reg:CC CC_REGNUM))] 10323 "TARGET_32BIT 10324 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y) 10325 != CCmode)" 10326 "#" 10327 "TARGET_32BIT && reload_completed" 10328 [(set (match_dup 7) 10329 (compare 10330 (ior:SI 10331 (match_op_dup 3 [(match_dup 1) (match_dup 2)]) 10332 (match_op_dup 6 [(match_dup 4) (match_dup 5)])) 10333 (const_int 0))) 10334 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))] 10335 "operands[7] 10336 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6], 10337 DOM_CC_X_OR_Y), 10338 
CC_REGNUM);" 10339 [(set_attr "conds" "clob") 10340 (set_attr "enabled_for_short_it" "yes,no") 10341 (set_attr "length" "16") 10342 (set_attr "type" "multiple")] 10343) 10344 10345; If the above pattern is followed by a CMP insn, then the compare is 10346; redundant, since we can rework the conditional instruction that follows. 10347(define_insn_and_split "*ior_scc_scc_cmp" 10348 [(set (match_operand 0 "dominant_cc_register" "") 10349 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator" 10350 [(match_operand:SI 1 "s_register_operand" "l,r") 10351 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")]) 10352 (match_operator:SI 6 "arm_comparison_operator" 10353 [(match_operand:SI 4 "s_register_operand" "l,r") 10354 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])) 10355 (const_int 0))) 10356 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts") 10357 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)]) 10358 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))] 10359 "TARGET_32BIT" 10360 "#" 10361 "TARGET_32BIT && reload_completed" 10362 [(set (match_dup 0) 10363 (compare 10364 (ior:SI 10365 (match_op_dup 3 [(match_dup 1) (match_dup 2)]) 10366 (match_op_dup 6 [(match_dup 4) (match_dup 5)])) 10367 (const_int 0))) 10368 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))] 10369 "" 10370 [(set_attr "conds" "set") 10371 (set_attr "enabled_for_short_it" "yes,no") 10372 (set_attr "length" "16") 10373 (set_attr "type" "multiple")] 10374) 10375 10376(define_insn_and_split "*and_scc_scc" 10377 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts") 10378 (and:SI (match_operator:SI 3 "arm_comparison_operator" 10379 [(match_operand:SI 1 "s_register_operand" "l,r") 10380 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")]) 10381 (match_operator:SI 6 "arm_comparison_operator" 10382 [(match_operand:SI 4 "s_register_operand" "l,r") 10383 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))) 10384 (clobber (reg:CC CC_REGNUM))] 10385 "TARGET_32BIT 10386 && 
(arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y) 10387 != CCmode)" 10388 "#" 10389 "TARGET_32BIT && reload_completed 10390 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y) 10391 != CCmode)" 10392 [(set (match_dup 7) 10393 (compare 10394 (and:SI 10395 (match_op_dup 3 [(match_dup 1) (match_dup 2)]) 10396 (match_op_dup 6 [(match_dup 4) (match_dup 5)])) 10397 (const_int 0))) 10398 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))] 10399 "operands[7] 10400 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6], 10401 DOM_CC_X_AND_Y), 10402 CC_REGNUM);" 10403 [(set_attr "conds" "clob") 10404 (set_attr "enabled_for_short_it" "yes,no") 10405 (set_attr "length" "16") 10406 (set_attr "type" "multiple")] 10407) 10408 10409; If the above pattern is followed by a CMP insn, then the compare is 10410; redundant, since we can rework the conditional instruction that follows. 10411(define_insn_and_split "*and_scc_scc_cmp" 10412 [(set (match_operand 0 "dominant_cc_register" "") 10413 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator" 10414 [(match_operand:SI 1 "s_register_operand" "l,r") 10415 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")]) 10416 (match_operator:SI 6 "arm_comparison_operator" 10417 [(match_operand:SI 4 "s_register_operand" "l,r") 10418 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])) 10419 (const_int 0))) 10420 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts") 10421 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)]) 10422 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))] 10423 "TARGET_32BIT" 10424 "#" 10425 "TARGET_32BIT && reload_completed" 10426 [(set (match_dup 0) 10427 (compare 10428 (and:SI 10429 (match_op_dup 3 [(match_dup 1) (match_dup 2)]) 10430 (match_op_dup 6 [(match_dup 4) (match_dup 5)])) 10431 (const_int 0))) 10432 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))] 10433 "" 10434 [(set_attr "conds" "set") 10435 (set_attr "enabled_for_short_it" 
"yes,no") 10436 (set_attr "length" "16") 10437 (set_attr "type" "multiple")] 10438) 10439 10440;; If there is no dominance in the comparison, then we can still save an 10441;; instruction in the AND case, since we can know that the second compare 10442;; need only zero the value if false (if true, then the value is already 10443;; correct). 10444(define_insn_and_split "*and_scc_scc_nodom" 10445 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts") 10446 (and:SI (match_operator:SI 3 "arm_comparison_operator" 10447 [(match_operand:SI 1 "s_register_operand" "r,r,0") 10448 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")]) 10449 (match_operator:SI 6 "arm_comparison_operator" 10450 [(match_operand:SI 4 "s_register_operand" "r,r,r") 10451 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")]))) 10452 (clobber (reg:CC CC_REGNUM))] 10453 "TARGET_32BIT 10454 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y) 10455 == CCmode)" 10456 "#" 10457 "TARGET_32BIT && reload_completed" 10458 [(parallel [(set (match_dup 0) 10459 (match_op_dup 3 [(match_dup 1) (match_dup 2)])) 10460 (clobber (reg:CC CC_REGNUM))]) 10461 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)])) 10462 (set (match_dup 0) 10463 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)]) 10464 (match_dup 0) 10465 (const_int 0)))] 10466 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]), 10467 operands[4], operands[5]), 10468 CC_REGNUM); 10469 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4], 10470 operands[5]);" 10471 [(set_attr "conds" "clob") 10472 (set_attr "length" "20") 10473 (set_attr "type" "multiple")] 10474) 10475 10476(define_split 10477 [(set (reg:CC_NZ CC_REGNUM) 10478 (compare:CC_NZ (ior:SI 10479 (and:SI (match_operand:SI 0 "s_register_operand" "") 10480 (const_int 1)) 10481 (match_operator:SI 1 "arm_comparison_operator" 10482 [(match_operand:SI 2 "s_register_operand" "") 10483 (match_operand:SI 3 
"arm_add_operand" "")])) 10484 (const_int 0))) 10485 (clobber (match_operand:SI 4 "s_register_operand" ""))] 10486 "TARGET_ARM" 10487 [(set (match_dup 4) 10488 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)]) 10489 (match_dup 0))) 10490 (set (reg:CC_NZ CC_REGNUM) 10491 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1)) 10492 (const_int 0)))] 10493 "") 10494 10495(define_split 10496 [(set (reg:CC_NZ CC_REGNUM) 10497 (compare:CC_NZ (ior:SI 10498 (match_operator:SI 1 "arm_comparison_operator" 10499 [(match_operand:SI 2 "s_register_operand" "") 10500 (match_operand:SI 3 "arm_add_operand" "")]) 10501 (and:SI (match_operand:SI 0 "s_register_operand" "") 10502 (const_int 1))) 10503 (const_int 0))) 10504 (clobber (match_operand:SI 4 "s_register_operand" ""))] 10505 "TARGET_ARM" 10506 [(set (match_dup 4) 10507 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)]) 10508 (match_dup 0))) 10509 (set (reg:CC_NZ CC_REGNUM) 10510 (compare:CC_NZ (and:SI (match_dup 4) (const_int 1)) 10511 (const_int 0)))] 10512 "") 10513;; ??? 
;; Rd = -(comparison result): split after reload into the cheapest
;; sequence for the comparison code (asr #31 for LT-against-0,
;; subs/mvnne for NE, otherwise cmp plus two conditional moves).
(define_insn_and_split "*negscc"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(neg:SI (match_operator 3 "arm_comparison_operator"
		 [(match_operand:SI 1 "s_register_operand" "r")
		  (match_operand:SI 2 "arm_rhs_operand" "rI")])))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);

    if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
      {
	/* Emit mov\\t%0, %1, asr #31 */
	emit_insn (gen_rtx_SET (operands[0],
				gen_rtx_ASHIFTRT (SImode,
						  operands[1],
						  GEN_INT (31))));
	DONE;
      }
    else if (GET_CODE (operands[3]) == NE)
      {
	/* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
	if (CONST_INT_P (operands[2]))
	  emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
					gen_int_mode (-INTVAL (operands[2]),
						      SImode)));
	else
	  emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));

	emit_insn (gen_rtx_COND_EXEC (VOIDmode,
				      gen_rtx_NE (SImode,
						  cc_reg,
						  const0_rtx),
				      gen_rtx_SET (operands[0],
						   GEN_INT (~0))));
	DONE;
      }
    else
      {
	/* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
	emit_insn (gen_rtx_SET (cc_reg,
				gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
	enum rtx_code rc = GET_CODE (operands[3]);

	/* Clear on the inverse condition, then set to ~0 (= -1) on the
	   original condition.  */
	rc = reverse_condition (rc);
	emit_insn (gen_rtx_COND_EXEC (VOIDmode,
				      gen_rtx_fmt_ee (rc,
						      VOIDmode,
						      cc_reg,
						      const0_rtx),
				      gen_rtx_SET (operands[0], const0_rtx)));
	rc = GET_CODE (operands[3]);
	emit_insn (gen_rtx_COND_EXEC (VOIDmode,
				      gen_rtx_fmt_ee (rc,
						      VOIDmode,
						      cc_reg,
						      const0_rtx),
				      gen_rtx_SET (operands[0],
						   GEN_INT (~0))));
	DONE;
      }
    FAIL;
  }
  [(set_attr "conds" "clob")
   (set_attr "length" "12")
   (set_attr "type" "multiple")]
)

;; Conditional move keyed on a comparison of (reg + addend) against 0.
;; Split after reload into an adds-style NZ compare, an unconditional
;; move of one arm and a conditional move of the other; the prepare code
;; picks which arm is conditional so a source tied to the destination
;; needs no copy.
(define_insn_and_split "movcond_addsi"
  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
	(if_then_else:SI
	 (match_operator 5 "comparison_operator"
	  [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
		    (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
	   (const_int 0)])
	 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
	 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_32BIT"
  "#"
  "&& reload_completed"
  [(set (reg:CC_NZ CC_REGNUM)
	(compare:CC_NZ
	 (plus:SI (match_dup 3)
		  (match_dup 4))
	 (const_int 0)))
   (set (match_dup 0) (match_dup 1))
   (cond_exec (match_dup 6)
	      (set (match_dup 0) (match_dup 2)))]
  "
  {
    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
					operands[3], operands[4]);
    enum rtx_code rc = GET_CODE (operands[5]);
    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
    gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
    /* If op2 is already in the destination, swap the arms instead of
       reversing the condition so the unconditional move is the no-op.  */
    if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
      rc = reverse_condition (rc);
    else
      std::swap (operands[1], operands[2]);

    operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
  }
  "
  [(set_attr "conds" "clob")
   (set_attr "enabled_for_short_it" "no,yes,yes")
   (set_attr "type" "multiple")]
)

;; General conditional move with its own compare.  LT/GE against zero
;; get branch-free AND/BIC-with-asr sequences when one arm is a register
;; or zero; otherwise compare (CMN when the negated constant encodes)
;; plus one or two conditional moves.
(define_insn "movcond"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
	 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
	 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "*
  if (GET_CODE (operands[5]) == LT
      && (operands[4] == const0_rtx))
    {
      if (which_alternative != 1 && REG_P (operands[1]))
	{
	  if (operands[2] == const0_rtx)
	    return \"and\\t%0, %1, %3, asr #31\";
	  return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
	}
      else if (which_alternative != 0 && REG_P (operands[2]))
	{
	  if (operands[1] == const0_rtx)
	    return \"bic\\t%0, %2, %3, asr #31\";
	  return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
	}
      /* The only case that falls through to here is when both ops 1 & 2
	 are constants.  */
    }

  if (GET_CODE (operands[5]) == GE
      && (operands[4] == const0_rtx))
    {
      if (which_alternative != 1 && REG_P (operands[1]))
	{
	  if (operands[2] == const0_rtx)
	    return \"bic\\t%0, %1, %3, asr #31\";
	  return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
	}
      else if (which_alternative != 0 && REG_P (operands[2]))
	{
	  if (operands[1] == const0_rtx)
	    return \"and\\t%0, %2, %3, asr #31\";
	  return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
	}
      /* The only case that falls through to here is when both ops 1 & 2
	 are constants.  */
    }
  if (CONST_INT_P (operands[4])
      && !const_ok_for_arm (INTVAL (operands[4])))
    output_asm_insn (\"cmn\\t%3, #%n4\", operands);
  else
    output_asm_insn (\"cmp\\t%3, %4\", operands);
  if (which_alternative != 0)
    output_asm_insn (\"mov%d5\\t%0, %1\", operands);
  if (which_alternative != 1)
    output_asm_insn (\"mov%D5\\t%0, %2\", operands);
  return \"\";
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "8,8,12")
   (set_attr "type" "multiple")]
)

;; ??? The patterns below need checking for Thumb-2 usefulness.
10692 10693(define_insn "*ifcompare_plus_move" 10694 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 10695 (if_then_else:SI (match_operator 6 "arm_comparison_operator" 10696 [(match_operand:SI 4 "s_register_operand" "r,r") 10697 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")]) 10698 (plus:SI 10699 (match_operand:SI 2 "s_register_operand" "r,r") 10700 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")) 10701 (match_operand:SI 1 "arm_rhs_operand" "0,?rI"))) 10702 (clobber (reg:CC CC_REGNUM))] 10703 "TARGET_ARM" 10704 "#" 10705 [(set_attr "conds" "clob") 10706 (set_attr "length" "8,12") 10707 (set_attr "type" "multiple")] 10708) 10709 10710(define_insn "*if_plus_move" 10711 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r") 10712 (if_then_else:SI 10713 (match_operator 4 "arm_comparison_operator" 10714 [(match_operand 5 "cc_register" "") (const_int 0)]) 10715 (plus:SI 10716 (match_operand:SI 2 "s_register_operand" "r,r,r,r") 10717 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L")) 10718 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))] 10719 "TARGET_ARM" 10720 "@ 10721 add%d4\\t%0, %2, %3 10722 sub%d4\\t%0, %2, #%n3 10723 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1 10724 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1" 10725 [(set_attr "conds" "use") 10726 (set_attr "length" "4,4,8,8") 10727 (set_attr_alternative "type" 10728 [(if_then_else (match_operand 3 "const_int_operand" "") 10729 (const_string "alu_imm" ) 10730 (const_string "alu_sreg")) 10731 (const_string "alu_imm") 10732 (const_string "multiple") 10733 (const_string "multiple")])] 10734) 10735 10736(define_insn "*ifcompare_move_plus" 10737 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 10738 (if_then_else:SI (match_operator 6 "arm_comparison_operator" 10739 [(match_operand:SI 4 "s_register_operand" "r,r") 10740 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")]) 10741 (match_operand:SI 1 "arm_rhs_operand" "0,?rI") 10742 (plus:SI 10743 (match_operand:SI 2 "s_register_operand" "r,r") 10744 
(match_operand:SI 3 "arm_add_operand" "rIL,rIL")))) 10745 (clobber (reg:CC CC_REGNUM))] 10746 "TARGET_ARM" 10747 "#" 10748 [(set_attr "conds" "clob") 10749 (set_attr "length" "8,12") 10750 (set_attr "type" "multiple")] 10751) 10752 10753(define_insn "*if_move_plus" 10754 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r") 10755 (if_then_else:SI 10756 (match_operator 4 "arm_comparison_operator" 10757 [(match_operand 5 "cc_register" "") (const_int 0)]) 10758 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI") 10759 (plus:SI 10760 (match_operand:SI 2 "s_register_operand" "r,r,r,r") 10761 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))] 10762 "TARGET_ARM" 10763 "@ 10764 add%D4\\t%0, %2, %3 10765 sub%D4\\t%0, %2, #%n3 10766 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1 10767 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1" 10768 [(set_attr "conds" "use") 10769 (set_attr "length" "4,4,8,8") 10770 (set_attr_alternative "type" 10771 [(if_then_else (match_operand 3 "const_int_operand" "") 10772 (const_string "alu_imm" ) 10773 (const_string "alu_sreg")) 10774 (const_string "alu_imm") 10775 (const_string "multiple") 10776 (const_string "multiple")])] 10777) 10778 10779(define_insn "*ifcompare_arith_arith" 10780 [(set (match_operand:SI 0 "s_register_operand" "=r") 10781 (if_then_else:SI (match_operator 9 "arm_comparison_operator" 10782 [(match_operand:SI 5 "s_register_operand" "r") 10783 (match_operand:SI 6 "arm_add_operand" "rIL")]) 10784 (match_operator:SI 8 "shiftable_operator" 10785 [(match_operand:SI 1 "s_register_operand" "r") 10786 (match_operand:SI 2 "arm_rhs_operand" "rI")]) 10787 (match_operator:SI 7 "shiftable_operator" 10788 [(match_operand:SI 3 "s_register_operand" "r") 10789 (match_operand:SI 4 "arm_rhs_operand" "rI")]))) 10790 (clobber (reg:CC CC_REGNUM))] 10791 "TARGET_ARM" 10792 "#" 10793 [(set_attr "conds" "clob") 10794 (set_attr "length" "12") 10795 (set_attr "type" "multiple")] 10796) 10797 10798(define_insn "*if_arith_arith" 10799 [(set (match_operand:SI 0 
"s_register_operand" "=r") 10800 (if_then_else:SI (match_operator 5 "arm_comparison_operator" 10801 [(match_operand 8 "cc_register" "") (const_int 0)]) 10802 (match_operator:SI 6 "shiftable_operator" 10803 [(match_operand:SI 1 "s_register_operand" "r") 10804 (match_operand:SI 2 "arm_rhs_operand" "rI")]) 10805 (match_operator:SI 7 "shiftable_operator" 10806 [(match_operand:SI 3 "s_register_operand" "r") 10807 (match_operand:SI 4 "arm_rhs_operand" "rI")])))] 10808 "TARGET_ARM" 10809 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4" 10810 [(set_attr "conds" "use") 10811 (set_attr "length" "8") 10812 (set_attr "type" "multiple")] 10813) 10814 10815(define_insn "*ifcompare_arith_move" 10816 [(set (match_operand:SI 0 "s_register_operand" "=r,r") 10817 (if_then_else:SI (match_operator 6 "arm_comparison_operator" 10818 [(match_operand:SI 2 "s_register_operand" "r,r") 10819 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")]) 10820 (match_operator:SI 7 "shiftable_operator" 10821 [(match_operand:SI 4 "s_register_operand" "r,r") 10822 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")]) 10823 (match_operand:SI 1 "arm_rhs_operand" "0,?rI"))) 10824 (clobber (reg:CC CC_REGNUM))] 10825 "TARGET_ARM" 10826 "* 10827 /* If we have an operation where (op x 0) is the identity operation and 10828 the conditional operator is LT or GE and we are comparing against zero and 10829 everything is in registers then we can do this in two instructions. 
*/
  if (operands[3] == const0_rtx
      && GET_CODE (operands[7]) != AND
      && REG_P (operands[5])
      && REG_P (operands[1])
      && REGNO (operands[1]) == REGNO (operands[4])
      && REGNO (operands[4]) != REGNO (operands[0]))
    {
      if (GET_CODE (operands[6]) == LT)
	return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
      else if (GET_CODE (operands[6]) == GE)
	return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
    }
  /* Constants that cannot be used directly with cmp are compared with cmn
     and the negated value instead.  */
  if (CONST_INT_P (operands[3])
      && !const_ok_for_arm (INTVAL (operands[3])))
    output_asm_insn (\"cmn\\t%2, #%n3\", operands);
  else
    output_asm_insn (\"cmp\\t%2, %3\", operands);
  output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
  if (which_alternative != 0)
    return \"mov%D6\\t%0, %1\";
  return \"\";
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; As above but the condition is already in CC: ALU op predicated on the
;; condition, optional mov on the inverse for the other arm.
(define_insn "*if_arith_move"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
			  [(match_operand 6 "cc_register" "") (const_int 0)])
			 (match_operator:SI 5 "shiftable_operator"
			  [(match_operand:SI 2 "s_register_operand" "r,r")
			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
  "TARGET_ARM"
  "@
   %I5%d4\\t%0, %2, %3
   %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
  [(set_attr "conds" "use")
   (set_attr "length" "4,8")
   (set_attr_alternative "type"
			 [(if_then_else (match_operand 3 "const_int_operand" "")
					(if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
						      (const_string "alu_shift_imm_lsl_1to4")
						      (const_string "alu_shift_imm_other"))
					(const_string "alu_shift_reg"))
			  (const_string "multiple")])]
)

;; Mirror image of *ifcompare_arith_move: plain value on the true arm,
;; ALU operation on the false arm.
(define_insn "*ifcompare_move_arith"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
(if_then_else:SI (match_operator 6 "arm_comparison_operator"
			  [(match_operand:SI 4 "s_register_operand" "r,r")
			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
			 (match_operator:SI 7 "shiftable_operator"
			  [(match_operand:SI 2 "s_register_operand" "r,r")
			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "*
  /* If we have an operation where (op x 0) is the identity operation and
     the conditional operator is LT or GE and we are comparing against zero and
     everything is in registers then we can do this in two instructions */
  /* GE/LT are swapped relative to *ifcompare_arith_move because here the
     ALU operation is the false arm of the if_then_else.  */
  if (operands[5] == const0_rtx
      && GET_CODE (operands[7]) != AND
      && REG_P (operands[3])
      && REG_P (operands[1])
      && REGNO (operands[1]) == REGNO (operands[2])
      && REGNO (operands[2]) != REGNO (operands[0]))
    {
      if (GET_CODE (operands[6]) == GE)
	return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
      else if (GET_CODE (operands[6]) == LT)
	return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
    }

  if (CONST_INT_P (operands[5])
      && !const_ok_for_arm (INTVAL (operands[5])))
    output_asm_insn (\"cmn\\t%4, #%n5\", operands);
  else
    output_asm_insn (\"cmp\\t%4, %5\", operands);

  if (which_alternative != 0)
    output_asm_insn (\"mov%d6\\t%0, %1\", operands);
  return \"%I7%D6\\t%0, %2, %3\";
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; CC-register form: ALU op on the inverse condition, optional mov for the
;; other arm.
(define_insn "*if_move_arith"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI
	 (match_operator 4 "arm_comparison_operator"
	  [(match_operand 6 "cc_register" "") (const_int 0)])
	 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
	 (match_operator:SI 5 "shiftable_operator"
	  [(match_operand:SI 2 "s_register_operand" "r,r")
(match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
  "TARGET_ARM"
  "@
   %I5%D4\\t%0, %2, %3
   %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
  [(set_attr "conds" "use")
   (set_attr "length" "4,8")
   (set_attr_alternative "type"
			 [(if_then_else (match_operand 3 "const_int_operand" "")
					(if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
						      (const_string "alu_shift_imm_lsl_1to4")
						      (const_string "alu_shift_imm_other"))
					(const_string "alu_shift_reg"))
			  (const_string "multiple")])]
)

;; Conditional select between a value and a bitwise NOT with the
;; comparison still explicit (CC clobbered); emits "#" and is handled by
;; the conditional-move splits later in this file.
(define_insn "*ifcompare_move_not"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand:SI 3 "s_register_operand" "r,r")
	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
	 (not:SI
	  (match_operand:SI 2 "s_register_operand" "r,r"))))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; CC-register form: mvn on the inverse condition, preceded by a mov/mvn
;; for the other arm when it is not already in %0 (%B1 = inverted bits
;; of the K-constraint constant).
;; The redundant single-valued (set_attr "type" "mvn_reg") that preceded
;; the per-alternative setting has been removed: only the per-alternative
;; value "mvn_reg,multiple,multiple" is correct for the 8-byte two-insn
;; alternatives, and it matches the companion pattern *if_not_move.
(define_insn "*if_move_not"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(if_then_else:SI
	 (match_operator 4 "arm_comparison_operator"
	  [(match_operand 3 "cc_register" "") (const_int 0)])
	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
  "TARGET_ARM"
  "@
   mvn%D4\\t%0, %2
   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
   mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
  [(set_attr "conds" "use")
   (set_attr "length" "4,8,8")
   (set_attr "type" "mvn_reg,multiple,multiple")]
)

;; Mirror image: NOT on the true arm, value on the false arm; comparison
;; explicit, so CC is clobbered and the insn is split later.
(define_insn "*ifcompare_not_move"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand:SI
3 "s_register_operand" "r,r")
	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
	 (not:SI
	  (match_operand:SI 2 "s_register_operand" "r,r"))
	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; CC-register form of the above: mvn on the condition, optional mov/mvn
;; for the other arm.
(define_insn "*if_not_move"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(if_then_else:SI
	 (match_operator 4 "arm_comparison_operator"
	  [(match_operand 3 "cc_register" "") (const_int 0)])
	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
  "TARGET_ARM"
  "@
   mvn%d4\\t%0, %2
   mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
   mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
  [(set_attr "conds" "use")
   (set_attr "type" "mvn_reg,multiple,multiple")
   (set_attr "length" "4,8,8")]
)

;; Conditional select between a shifted value and a plain value, with the
;; comparison still explicit (CC clobbered).
(define_insn "*ifcompare_shift_move"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI
	 (match_operator 6 "arm_comparison_operator"
	  [(match_operand:SI 4 "s_register_operand" "r,r")
	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
	 (match_operator:SI 7 "shift_operator"
	  [(match_operand:SI 2 "s_register_operand" "r,r")
	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; CC-register form: conditional mov with a shifter operand (%S4).
(define_insn "*if_shift_move"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand 6 "cc_register" "") (const_int 0)])
	 (match_operator:SI 4 "shift_operator"
	  [(match_operand:SI 2
"s_register_operand" "r,r,r")
	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
  "TARGET_ARM"
  "@
   mov%d5\\t%0, %2%S4
   mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
   mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
  [(set_attr "conds" "use")
   (set_attr "shift" "2")
   (set_attr "length" "4,8,8")
   (set_attr_alternative "type"
			 [(if_then_else (match_operand 3 "const_int_operand" "")
					(const_string "mov_shift" )
					(const_string "mov_shift_reg"))
			  (const_string "multiple")
			  (const_string "multiple")])]
)

;; Mirror image: plain value on the true arm, shifted value on the false
;; arm; comparison explicit (CC clobbered), split later.
(define_insn "*ifcompare_move_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI
	 (match_operator 6 "arm_comparison_operator"
	  [(match_operand:SI 4 "s_register_operand" "r,r")
	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
	 (match_operator:SI 7 "shift_operator"
	  [(match_operand:SI 2 "s_register_operand" "r,r")
	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; CC-register form of the above: like *if_shift_move with the predication
;; senses exchanged.
(define_insn "*if_move_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand 6 "cc_register" "") (const_int 0)])
	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
	 (match_operator:SI 4 "shift_operator"
	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
  "TARGET_ARM"
  "@
   mov%D5\\t%0, %2%S4
   mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
   mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
  [(set_attr "conds" "use")
   (set_attr "shift" "2")
   (set_attr "length" "4,8,8")
(set_attr_alternative "type"
			 [(if_then_else (match_operand 3 "const_int_operand" "")
					(const_string "mov_shift" )
					(const_string "mov_shift_reg"))
			  (const_string "multiple")
			  (const_string "multiple")])]
)

;; Conditional select between two shifted values; comparison explicit
;; (CC clobbered), split later.
(define_insn "*ifcompare_shift_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(if_then_else:SI
	 (match_operator 7 "arm_comparison_operator"
	  [(match_operand:SI 5 "s_register_operand" "r")
	   (match_operand:SI 6 "arm_add_operand" "rIL")])
	 (match_operator:SI 8 "shift_operator"
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
	 (match_operator:SI 9 "shift_operator"
	  [(match_operand:SI 3 "s_register_operand" "r")
	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "12")
   (set_attr "type" "multiple")]
)

;; CC-register form: two conditional shifted movs on opposite conditions.
(define_insn "*if_shift_shift"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand 8 "cc_register" "") (const_int 0)])
	 (match_operator:SI 6 "shift_operator"
	  [(match_operand:SI 1 "s_register_operand" "r")
	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
	 (match_operator:SI 7 "shift_operator"
	  [(match_operand:SI 3 "s_register_operand" "r")
	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
  "TARGET_ARM"
  "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
  [(set_attr "conds" "use")
   (set_attr "shift" "1")
   (set_attr "length" "8")
   (set (attr "type") (if_then_else
		       (and (match_operand 2 "const_int_operand" "")
			    (match_operand 4 "const_int_operand" ""))
		       (const_string "mov_shift")
		       (const_string "mov_shift_reg")))]
)

;; Conditional select between a NOT and a shiftable ALU op; comparison
;; explicit (CC clobbered), split later.
(define_insn "*ifcompare_not_arith"
  [(set (match_operand:SI 0
"s_register_operand" "=r")
	(if_then_else:SI
	 (match_operator 6 "arm_comparison_operator"
	  [(match_operand:SI 4 "s_register_operand" "r")
	   (match_operand:SI 5 "arm_add_operand" "rIL")])
	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
	 (match_operator:SI 7 "shiftable_operator"
	  [(match_operand:SI 2 "s_register_operand" "r")
	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "12")
   (set_attr "type" "multiple")]
)

;; CC-register form: conditional mvn plus the ALU op on the inverse
;; condition.
(define_insn "*if_not_arith"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand 4 "cc_register" "") (const_int 0)])
	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
	 (match_operator:SI 6 "shiftable_operator"
	  [(match_operand:SI 2 "s_register_operand" "r")
	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
  "TARGET_ARM"
  "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
  [(set_attr "conds" "use")
   (set_attr "type" "mvn_reg")
   (set_attr "length" "8")]
)

;; Mirror image: ALU op on the true arm, NOT on the false arm; comparison
;; explicit (CC clobbered), split later.
(define_insn "*ifcompare_arith_not"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(if_then_else:SI
	 (match_operator 6 "arm_comparison_operator"
	  [(match_operand:SI 4 "s_register_operand" "r")
	   (match_operand:SI 5 "arm_add_operand" "rIL")])
	 (match_operator:SI 7 "shiftable_operator"
	  [(match_operand:SI 2 "s_register_operand" "r")
	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "12")
   (set_attr "type" "multiple")]
)

;; CC-register form of the above.
(define_insn "*if_arith_not"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand 4 "cc_register" "") (const_int 0)])
	 (match_operator:SI 6 "shiftable_operator"
	  [(match_operand:SI 2 "s_register_operand" "r")
	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
  "TARGET_ARM"
  "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
  [(set_attr "conds" "use")
   (set_attr "type" "multiple")
   (set_attr "length" "8")]
)

;; Conditional select between a negation and a plain value; comparison
;; explicit (CC clobbered), split later.
(define_insn "*ifcompare_neg_move"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI
	 (match_operator 5 "arm_comparison_operator"
	  [(match_operand:SI 3 "s_register_operand" "r,r")
	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; CC-register form: after reload, split into a conditionally executed
;; negation (the false arm is tied to %0 by the "0" constraints).
(define_insn_and_split "*if_neg_move"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
	(if_then_else:SI
	 (match_operator 4 "arm_comparison_operator"
	  [(match_operand 3 "cc_register" "") (const_int 0)])
	 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
	 (match_operand:SI 1 "s_register_operand" "0,0")))]
  "TARGET_32BIT && !TARGET_COND_ARITH"
  "#"
  "&& reload_completed"
  [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
	      (set (match_dup 0) (neg:SI (match_dup 2))))]
  ""
  [(set_attr "conds" "use")
   (set_attr "length" "4")
   (set_attr "arch" "t2,32")
   (set_attr "enabled_for_short_it" "yes,no")
   (set_attr "type" "logic_shift_imm")]
)

;; Mirror image: value on the true arm, negation on the false arm;
;; comparison explicit (CC clobbered), split later.
(define_insn "*ifcompare_move_neg"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI
(match_operator 5 "arm_comparison_operator"
	  [(match_operand:SI 3 "s_register_operand" "r,r")
	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "#"
  [(set_attr "conds" "clob")
   (set_attr "length" "8,12")
   (set_attr "type" "multiple")]
)

;; CC-register form: split into a negation conditionally executed on the
;; reverse of operator 4 (reversed because the neg is the false arm).
(define_insn_and_split "*if_move_neg"
  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
	(if_then_else:SI
	 (match_operator 4 "arm_comparison_operator"
	  [(match_operand 3 "cc_register" "") (const_int 0)])
	 (match_operand:SI 1 "s_register_operand" "0,0")
	 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
  "TARGET_32BIT"
  "#"
  "&& reload_completed"
  [(cond_exec (match_dup 5)
	      (set (match_dup 0) (neg:SI (match_dup 2))))]
  {
    machine_mode mode = GET_MODE (operands[3]);
    rtx_code rc = GET_CODE (operands[4]);

    /* Floating-point compares need the unordered-aware reversal.  */
    if (mode == CCFPmode || mode == CCFPEmode)
      rc = reverse_condition_maybe_unordered (rc);
    else
      rc = reverse_condition (rc);

    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
  }
  [(set_attr "conds" "use")
   (set_attr "length" "4")
   (set_attr "arch" "t2,32")
   (set_attr "enabled_for_short_it" "yes,no")
   (set_attr "type" "logic_shift_imm")]
)

;; ALU operation on two adjacent memory words: load both with one ldm
;; (or two ldr when the offset is out of range) into %0 and the scratch,
;; then apply the operation.
(define_insn "*arith_adjacentmem"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(match_operator:SI 1 "shiftable_operator"
	 [(match_operand:SI 2 "memory_operand" "m")
	  (match_operand:SI 3 "memory_operand" "m")]))
   (clobber (match_scratch:SI 4 "=r"))]
  "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
  "*
  {
    rtx ldm[3];
    rtx arith[4];
    rtx base_reg;
    HOST_WIDE_INT val1 = 0, val2 = 0;

    /* ldm requires its register list in ascending order.  */
    if (REGNO
(operands[0]) > REGNO (operands[4]))
      {
	ldm[1] = operands[4];
	ldm[2] = operands[0];
      }
    else
      {
	ldm[1] = operands[0];
	ldm[2] = operands[4];
      }

    base_reg = XEXP (operands[2], 0);

    /* Peel a (plus base offset) address into base register and offset.  */
    if (!REG_P (base_reg))
      {
	val1 = INTVAL (XEXP (base_reg, 1));
	base_reg = XEXP (base_reg, 0);
      }

    if (!REG_P (XEXP (operands[3], 0)))
      val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));

    arith[0] = operands[0];
    arith[3] = operands[1];

    /* Order the ALU operands to match the order the words are loaded.  */
    if (val1 < val2)
      {
	arith[1] = ldm[1];
	arith[2] = ldm[2];
      }
    else
      {
	arith[1] = ldm[2];
	arith[2] = ldm[1];
      }

    ldm[0] = base_reg;
    if (val1 !=0 && val2 != 0)
      {
	rtx ops[3];

	if (val1 == 4 || val2 == 4)
	  /* Other val must be 8, since we know they are adjacent and neither
	     is zero.  */
	  output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
	else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
	  {
	    ldm[0] = ops[0] = operands[4];
	    ops[1] = base_reg;
	    ops[2] = GEN_INT (val1);
	    output_add_immediate (ops);
	    if (val1 < val2)
	      output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
	    else
	      output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
	  }
	else
	  {
	    /* Offset is out of range for a single add, so use two ldr.
*/
	    ops[0] = ldm[1];
	    ops[1] = base_reg;
	    ops[2] = GEN_INT (val1);
	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
	    ops[0] = ldm[2];
	    ops[2] = GEN_INT (val2);
	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
	  }
      }
    else if (val1 != 0)
      {
	if (val1 < val2)
	  output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
	else
	  output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
      }
    else
      {
	if (val1 < val2)
	  output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
	else
	  output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
      }
    output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
    return \"\";
  }"
  [(set_attr "length" "12")
   (set_attr "predicable" "yes")
   (set_attr "type" "load_4")]
)

; This pattern is never tried by combine, so do it as a peephole

;; Merge a register copy followed by a compare of the source against zero
;; into a single compare-and-set parallel.
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "arm_general_register_operand" ""))
   (set (reg:CC CC_REGNUM)
	(compare:CC (match_dup 1) (const_int 0)))]
  "TARGET_ARM"
  [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
	      (set (match_dup 0) (match_dup 1))])]
  ""
)

;; Split (x >= 0) & -(cond) into a mask built with not+asr #31 in the
;; scratch register, ANDed with the comparison result.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
		       (const_int 0))
		(neg:SI (match_operator:SI 2 "arm_comparison_operator"
			 [(match_operand:SI 3 "s_register_operand" "")
			  (match_operand:SI 4 "arm_rhs_operand" "")]))))
   (clobber (match_operand:SI 5 "s_register_operand" ""))]
  "TARGET_ARM"
  [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
   (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
			      (match_dup 5)))]
  ""
)

;; This split can be used because CC_Z mode implies that the following
;;
branch will be an equality, or an unsigned inequality, so the sign
;; extension is not needed.

;; Narrow a compare of (QI mem zero-extended then shifted left by 24)
;; against a constant whose low 24 bits are zero into a plain
;; zero-extend and a compare against the constant shifted right by 24.
(define_split
  [(set (reg:CC_Z CC_REGNUM)
	(compare:CC_Z
	 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
		    (const_int 24))
	 (match_operand 1 "const_int_operand" "")))
   (clobber (match_scratch:SI 2 ""))]
  "TARGET_ARM
   && ((UINTVAL (operands[1]))
       == ((UINTVAL (operands[1])) >> 24) << 24)"
  [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
   (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
  "
  operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
  "
)
;; ??? Check the patterns above for Thumb-2 usefulness

;; Expand the function prologue; dispatches on 32-bit vs Thumb-1.
(define_expand "prologue"
  [(clobber (const_int 0))]
  "TARGET_EITHER"
  "if (TARGET_32BIT)
     arm_expand_prologue ();
   else
     thumb1_expand_prologue ();
   DONE;
  "
)

;; Expand the function epilogue; emits a simple return insn when
;; HAVE_return allows it, otherwise the full epilogue sequence.
(define_expand "epilogue"
  [(clobber (const_int 0))]
  "TARGET_EITHER"
  "
  if (crtl->calls_eh_return)
    emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
  if (TARGET_THUMB1)
    {
      thumb1_expand_epilogue ();
      emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
		      gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
    }
  else if (HAVE_return)
    {
      /* HAVE_return is testing for USE_RETURN_INSN (FALSE).  Hence,
	 no need for explicit testing again.  */
      emit_jump_insn (gen_return ());
    }
  else if (TARGET_32BIT)
    {
      arm_expand_epilogue (true);
    }
  DONE;
  "
)

;; Note - although unspec_volatile's USE all hard registers,
;; USEs are ignored after reload has completed.  Thus we need
;; to add an unspec of the link register to ensure that flow
;; does not think that it is unused by the sibcall branch that
;; will replace the standard function epilogue.
;; Expand the epilogue for a sibling call; the unspec keeps LR live (see
;; the note above this pattern in the file).
(define_expand "sibcall_epilogue"
  [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
	      (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
  "TARGET_32BIT"
  "
  arm_expand_epilogue (false);
  DONE;
  "
)

;; Expand the exception-handler epilogue; the handler's return address
;; (operand 2) is forced into r2 if it is not already there.
(define_expand "eh_epilogue"
  [(use (match_operand:SI 0 "register_operand"))
   (use (match_operand:SI 1 "register_operand"))
   (use (match_operand:SI 2 "register_operand"))]
  "TARGET_EITHER"
  "
  {
    cfun->machine->eh_epilogue_sp_ofs = operands[1];
    if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
      {
	rtx ra = gen_rtx_REG (Pmode, 2);

	emit_move_insn (ra, operands[2]);
	operands[2] = ra;
      }
    /* This is a hack -- we may have crystalized the function type too
       early.  */
    cfun->machine->func_type = 0;
  }"
)

;; This split is only used during output to reduce the number of patterns
;; that need assembler instructions adding to them.  We allowed the setting
;; of the conditions to be implicit during rtl generation so that
;; the conditional compare patterns would work.  However this conflicts to
;; some extent with the conditional data operations, so we have to split them
;; up again here.

;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
;; conditional execution sufficient?
;; Split an if_then_else whose true arm is already the destination:
;; emit the compare, then one cond_exec move of the false arm on the
;; reversed condition.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
			  [(match_operand 2 "" "") (match_operand 3 "" "")])
			 (match_dup 0)
			 (match_operand 4 "" "")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM && reload_completed"
  [(set (match_dup 5) (match_dup 6))
   (cond_exec (match_dup 7)
	      (set (match_dup 0) (match_dup 4)))]
  "
  {
    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
					operands[2], operands[3]);
    enum rtx_code rc = GET_CODE (operands[1]);

    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
    /* Floating-point compares need the unordered-aware reversal.  */
    if (mode == CCFPmode || mode == CCFPEmode)
      rc = reverse_condition_maybe_unordered (rc);
    else
      rc = reverse_condition (rc);

    operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
  }"
)

;; As above, but the false arm is already the destination, so the single
;; conditional move uses the original (unreversed) condition.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
			  [(match_operand 2 "" "") (match_operand 3 "" "")])
			 (match_operand 4 "" "")
			 (match_dup 0)))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM && reload_completed"
  [(set (match_dup 5) (match_dup 6))
   (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
	      (set (match_dup 0) (match_dup 4)))]
  "
  {
    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
					operands[2], operands[3]);

    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
  }"
)

;; General case: neither arm is the destination; emit the compare and two
;; conditional moves on opposite conditions.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
			  [(match_operand 2 "" "") (match_operand 3 "" "")])
			 (match_operand 4 "" "")
			 (match_operand 5 "" "")))
(clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM && reload_completed"
  [(set (match_dup 6) (match_dup 7))
   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
	      (set (match_dup 0) (match_dup 4)))
   (cond_exec (match_dup 8)
	      (set (match_dup 0) (match_dup 5)))]
  "
  {
    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
					operands[2], operands[3]);
    enum rtx_code rc = GET_CODE (operands[1]);

    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
    /* Floating-point compares need the unordered-aware reversal.  */
    if (mode == CCFPmode || mode == CCFPEmode)
      rc = reverse_condition_maybe_unordered (rc);
    else
      rc = reverse_condition (rc);

    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
  }"
)

;; Like the previous split, but the false arm is a bitwise NOT.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
			  [(match_operand:SI 2 "s_register_operand" "")
			   (match_operand:SI 3 "arm_add_operand" "")])
			 (match_operand:SI 4 "arm_rhs_operand" "")
			 (not:SI
			  (match_operand:SI 5 "s_register_operand" ""))))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM && reload_completed"
  [(set (match_dup 6) (match_dup 7))
   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
	      (set (match_dup 0) (match_dup 4)))
   (cond_exec (match_dup 8)
	      (set (match_dup 0) (not:SI (match_dup 5))))]
  "
  {
    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
					operands[2], operands[3]);
    enum rtx_code rc = GET_CODE (operands[1]);

    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
    /* Floating-point compares need the unordered-aware reversal.  */
    if (mode == CCFPmode || mode == CCFPEmode)
      rc = reverse_condition_maybe_unordered (rc);
    else
      rc = reverse_condition (rc);

    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6],
const0_rtx);
  }"
)

;; Conditional select (condition already in CC) between a value and a
;; bitwise NOT.
(define_insn "*cond_move_not"
  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
			  [(match_operand 3 "cc_register" "") (const_int 0)])
			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
			 (not:SI
			  (match_operand:SI 2 "s_register_operand" "r,r"))))]
  "TARGET_ARM"
  "@
   mvn%D4\\t%0, %2
   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
  [(set_attr "conds" "use")
   (set_attr "type" "mvn_reg,multiple")
   (set_attr "length" "4,8")]
)

;; The next two patterns occur when an AND operation is followed by a
;; scc insn sequence

;; Sign-extract one bit: ands sets Z, mvnne writes -1 when the bit is set
;; (leaving the 0 from ands when it is clear).
(define_insn "*sign_extract_onebit"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
			 (const_int 1)
			 (match_operand:SI 2 "const_int_operand" "n")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "*
  operands[2] = GEN_INT (1 << INTVAL (operands[2]));
  output_asm_insn (\"ands\\t%0, %1, %2\", operands);
  return \"mvnne\\t%0, #0\";
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "8")
   (set_attr "type" "multiple")]
)

;; Inverse of the above: tst then mvneq/movne gives -1 when the bit is
;; clear and 0 when it is set.
(define_insn "*not_signextract_onebit"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(not:SI
	 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
			  (const_int 1)
			  (match_operand:SI 2 "const_int_operand" "n"))))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_ARM"
  "*
  operands[2] = GEN_INT (1 << INTVAL (operands[2]));
  output_asm_insn (\"tst\\t%1, %2\", operands);
  output_asm_insn (\"mvneq\\t%0, #0\", operands);
  return \"movne\\t%0, #0\";
  "
  [(set_attr "conds" "clob")
   (set_attr "length" "12")
   (set_attr "type" "multiple")]
)
;; ???
The above patterns need auditing for Thumb-2

;; Push multiple registers to the stack.  Registers are in parallel (use ...)
;; expressions.  For simplicity, the first register is also in the unspec
;; part.
;; To avoid the usage of GNU extension, the length attribute is computed
;; in a C function arm_attr_length_push_multi.
(define_insn "*push_multi"
  [(match_parallel 2 "multi_register_push"
    [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
	  (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
		      UNSPEC_PUSH_MULT))])]
  ""
  "*
  {
    int num_saves = XVECLEN (operands[2], 0);

    /* For the StrongARM at least it is faster to
       use STR to store only a single register.
       In Thumb mode always use push, and the assembler will pick
       something appropriate.  */
    if (num_saves == 1 && TARGET_ARM)
      output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
    else
      {
	int i;
	char pattern[100];

	/* Build the register list for a single push instruction.  */
	if (TARGET_32BIT)
	  strcpy (pattern, \"push%?\\t{%1\");
	else
	  strcpy (pattern, \"push\\t{%1\");

	for (i = 1; i < num_saves; i++)
	  {
	    strcat (pattern, \", %|\");
	    strcat (pattern,
		    reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
	  }

	strcat (pattern, \"}\");
	output_asm_insn (pattern, operands);
      }

    return \"\";
  }"
  [(set_attr "type" "store_16")
   (set (attr "length")
	(symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
)

;; Zero-length insn emitting no code; its only effect is the RTL
;; dependence it creates between stack memory and the two registers
;; (UNSPEC_PRLG_STK) during prologue/epilogue generation.
(define_insn "stack_tie"
  [(set (mem:BLK (scratch))
	(unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
		     (match_operand:SI 1 "s_register_operand" "rk")]
		    UNSPEC_PRLG_STK))]
  ""
  ""
  [(set_attr "length" "0")
   (set_attr "type" "block")]
)

;; Pop (as used in epilogue RTL)
;;
(define_insn
"*load_multiple_with_writeback" 11762 [(match_parallel 0 "load_multiple_operation" 11763 [(set (match_operand:SI 1 "s_register_operand" "+rk") 11764 (plus:SI (match_dup 1) 11765 (match_operand:SI 2 "const_int_I_operand" "I"))) 11766 (set (match_operand:SI 3 "s_register_operand" "=rk") 11767 (mem:SI (match_dup 1))) 11768 ])] 11769 "TARGET_32BIT && (reload_in_progress || reload_completed)" 11770 "* 11771 { 11772 arm_output_multireg_pop (operands, /*return_pc=*/false, 11773 /*cond=*/const_true_rtx, 11774 /*reverse=*/false, 11775 /*update=*/true); 11776 return \"\"; 11777 } 11778 " 11779 [(set_attr "type" "load_16") 11780 (set_attr "predicable" "yes") 11781 (set (attr "length") 11782 (symbol_ref "arm_attr_length_pop_multi (operands, 11783 /*return_pc=*/false, 11784 /*write_back_p=*/true)"))] 11785) 11786 11787;; Pop with return (as used in epilogue RTL) 11788;; 11789;; This instruction is generated when the registers are popped at the end of 11790;; epilogue. Here, instead of popping the value into LR and then generating 11791;; jump to LR, value is popped into PC directly. Hence, the pattern is combined 11792;; with (return). 
(define_insn "*pop_multiple_with_writeback_and_return"
  [(match_parallel 0 "pop_multiple_return"
    [(return)
     (set (match_operand:SI 1 "s_register_operand" "+rk")
	  (plus:SI (match_dup 1)
		   (match_operand:SI 2 "const_int_I_operand" "I")))
     (set (match_operand:SI 3 "s_register_operand" "=rk")
	  (mem:SI (match_dup 1)))
    ])]
  "TARGET_32BIT && (reload_in_progress || reload_completed)"
  "*
  {
    arm_output_multireg_pop (operands, /*return_pc=*/true,
			     /*cond=*/const_true_rtx,
			     /*reverse=*/false,
			     /*update=*/true);
    return \"\";
  }
  "
  [(set_attr "type" "load_16")
   (set_attr "predicable" "yes")
   (set (attr "length")
	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
						/*write_back_p=*/true)"))]
)

(define_insn "*pop_multiple_with_return"
  [(match_parallel 0 "pop_multiple_return"
    [(return)
     (set (match_operand:SI 2 "s_register_operand" "=rk")
	  (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
    ])]
  "TARGET_32BIT && (reload_in_progress || reload_completed)"
  "*
  {
    arm_output_multireg_pop (operands, /*return_pc=*/true,
			     /*cond=*/const_true_rtx,
			     /*reverse=*/false,
			     /*update=*/false);
    return \"\";
  }
  "
  [(set_attr "type" "load_16")
   (set_attr "predicable" "yes")
   (set (attr "length")
	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
						/*write_back_p=*/false)"))]
)

;; Load into PC and return
(define_insn "*ldr_with_return"
  [(return)
   (set (reg:SI PC_REGNUM)
	(mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
  "TARGET_32BIT && (reload_in_progress || reload_completed)"
  "ldr%?\t%|pc, [%0], #4"
  [(set_attr "type" "load_4")
   (set_attr "predicable" "yes")]
)

;; Pop for floating point registers (as used in epilogue RTL)
(define_insn "*vfp_pop_multiple_with_writeback"
  [(match_parallel 0 "pop_multiple_fp"
    [(set (match_operand:SI 1 "s_register_operand" "+rk")
	  (plus:SI (match_dup 1)
		   (match_operand:SI 2 "const_int_I_operand" "I")))
     (set (match_operand:DF 3 "vfp_hard_register_operand" "")
	  (mem:DF (match_dup 1)))])]
  "TARGET_32BIT && TARGET_VFP_BASE"
  "*
  {
    int num_regs = XVECLEN (operands[0], 0);
    char pattern[100];
    rtx op_list[2];
    strcpy (pattern, \"vldm\\t\");
    strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
    strcat (pattern, \"!, {\");
    op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
    strcat (pattern, \"%P0\");
    if ((num_regs - 1) > 1)
      {
	strcat (pattern, \"-%P1\");
	op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
      }

    strcat (pattern, \"}\");
    output_asm_insn (pattern, op_list);
    return \"\";
  }
  "
  [(set_attr "type" "load_16")
   (set_attr "conds" "unconditional")
   (set_attr "predicable" "no")]
)

;; Special patterns for dealing with the constant pool

(define_insn "align_4"
  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
  "TARGET_EITHER"
  "*
  assemble_align (32);
  return \"\";
  "
  [(set_attr "type" "no_insn")]
)

(define_insn "align_8"
  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
  "TARGET_EITHER"
  "*
  assemble_align (64);
  return \"\";
  "
  [(set_attr "type" "no_insn")]
)

(define_insn "consttable_end"
  [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
  "TARGET_EITHER"
  "*
  making_const_table = FALSE;
  return \"\";
  "
  [(set_attr "type" "no_insn")]
)

(define_insn "consttable_1"
  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
  "TARGET_EITHER"
  "*
  making_const_table = TRUE;
  assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
  assemble_zeros (3);
  return \"\";
  "
  [(set_attr "length" "4")
   (set_attr "type" "no_insn")]
)

(define_insn "consttable_2"
  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
  "TARGET_EITHER"
  "*
  {
    rtx x = operands[0];
    making_const_table = TRUE;
    switch (GET_MODE_CLASS (GET_MODE (x)))
      {
      case MODE_FLOAT:
	arm_emit_fp16_const (x);
	break;
      default:
	assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
	assemble_zeros (2);
	break;
      }
    return \"\";
  }"
  [(set_attr "length" "4")
   (set_attr "type" "no_insn")]
)

(define_insn "consttable_4"
  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
  "TARGET_EITHER"
  "*
  {
    rtx x = operands[0];
    making_const_table = TRUE;
    scalar_float_mode float_mode;
    if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
      assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
    else
      {
	/* XXX: Sometimes gcc does something really dumb and ends up with
	   a HIGH in a constant pool entry, usually because it's trying to
	   load into a VFP register.  We know this will always be used in
	   combination with a LO_SUM which ignores the high bits, so just
	   strip off the HIGH.  */
	if (GET_CODE (x) == HIGH)
	  x = XEXP (x, 0);
	assemble_integer (x, 4, BITS_PER_WORD, 1);
	mark_symbol_refs_as_used (x);
      }
    return \"\";
  }"
  [(set_attr "length" "4")
   (set_attr "type" "no_insn")]
)

(define_insn "consttable_8"
  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
  "TARGET_EITHER"
  "*
  {
    making_const_table = TRUE;
    scalar_float_mode float_mode;
    if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
      assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
		     float_mode, BITS_PER_WORD);
    else
      assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
    return \"\";
  }"
  [(set_attr "length" "8")
   (set_attr "type" "no_insn")]
)

(define_insn "consttable_16"
  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
  "TARGET_EITHER"
  "*
  {
    making_const_table = TRUE;
    scalar_float_mode float_mode;
    if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
      assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
		     float_mode, BITS_PER_WORD);
    else
      assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
    return \"\";
  }"
  [(set_attr "length" "16")
   (set_attr "type" "no_insn")]
)

;; V5 Instructions.

(define_insn "clzsi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
  "TARGET_32BIT && arm_arch5t"
  "clz%?\\t%0, %1"
  [(set_attr "predicable" "yes")
   (set_attr "type" "clz")])

(define_insn "rbitsi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
  "TARGET_32BIT && arm_arch_thumb2"
  "rbit%?\\t%0, %1"
  [(set_attr "predicable" "yes")
   (set_attr "type" "clz")])

;; Keep this as a CTZ expression until after reload and then split
;; into RBIT + CLZ.  Since RBIT is represented as an UNSPEC it is unlikely
;; to fold with any other expression.

(define_insn_and_split "ctzsi2"
 [(set (match_operand:SI 0 "s_register_operand" "=r")
       (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
  "TARGET_32BIT && arm_arch_thumb2"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "
  emit_insn (gen_rbitsi2 (operands[0], operands[1]));
  emit_insn (gen_clzsi2 (operands[0], operands[0]));
  DONE;
")

;; V5E instructions.

(define_insn "prefetch"
  [(prefetch (match_operand:SI 0 "address_operand" "p")
	     (match_operand:SI 1 "" "")
	     (match_operand:SI 2 "" ""))]
  "TARGET_32BIT && arm_arch5te"
  "pld\\t%a0"
  [(set_attr "type" "load_4")]
)

;; General predication pattern

(define_cond_exec
  [(match_operator 0 "arm_comparison_operator"
    [(match_operand 1 "cc_register" "")
     (const_int 0)])]
  "TARGET_32BIT
   && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
  ""
[(set_attr "predicated" "yes")]
)

(define_insn "force_register_use"
  [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
  ""
  "%@ %0 needed"
  [(set_attr "length" "0")
   (set_attr "type" "no_insn")]
)


;; Patterns for exception handling

(define_expand "eh_return"
  [(use (match_operand 0 "general_operand"))]
  "TARGET_EITHER"
  "
  {
    if (TARGET_32BIT)
      emit_insn (gen_arm_eh_return (operands[0]));
    else
      emit_insn (gen_thumb_eh_return (operands[0]));
    DONE;
  }"
)

;; We can't expand this before we know where the link register is stored.
(define_insn_and_split "arm_eh_return"
  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
		    VUNSPEC_EH_RETURN)
   (clobber (match_scratch:SI 1 "=&r"))]
  "TARGET_ARM"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "
  {
    arm_set_return_address (operands[0], operands[1]);
    DONE;
  }"
)


;; TLS support

(define_insn "load_tp_hard"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(unspec:SI [(const_int 0)] UNSPEC_TLS))]
  "TARGET_HARD_TP"
  "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
  [(set_attr "predicable" "yes")
   (set_attr "type" "mrs")]
)

;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
(define_insn "load_tp_soft_fdpic"
  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
   (clobber (reg:SI FDPIC_REGNUM))
   (clobber (reg:SI LR_REGNUM))
   (clobber (reg:SI IP_REGNUM))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_SOFT_TP && TARGET_FDPIC"
  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
  [(set_attr "conds" "clob")
   (set_attr "type" "branch")]
)

;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
(define_insn "load_tp_soft"
  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
   (clobber (reg:SI LR_REGNUM))
   (clobber (reg:SI IP_REGNUM))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_SOFT_TP && !TARGET_FDPIC"
  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
  [(set_attr "conds" "clob")
   (set_attr "type" "branch")]
)

;; tls descriptor call
(define_insn "tlscall"
  [(set (reg:SI R0_REGNUM)
	(unspec:SI [(reg:SI R0_REGNUM)
		    (match_operand:SI 0 "" "X")
		    (match_operand 1 "" "")] UNSPEC_TLS))
   (clobber (reg:SI R1_REGNUM))
   (clobber (reg:SI LR_REGNUM))
   (clobber (reg:SI CC_REGNUM))]
  "TARGET_GNU2_TLS"
  {
    targetm.asm_out.internal_label (asm_out_file, "LPIC",
				    INTVAL (operands[1]));
    return "bl\\t%c0(tlscall)";
  }
  [(set_attr "conds" "clob")
   (set_attr "length" "4")
   (set_attr "type" "branch")]
)

;; For thread pointer builtin
(define_expand "get_thread_pointersi"
  [(match_operand:SI 0 "s_register_operand")]
 ""
 "
 {
   arm_load_tp (operands[0]);
   DONE;
 }")

;;

;; We only care about the lower 16 bits of the constant
;; being inserted into the upper 16 bits of the register.
(define_insn "*arm_movtas_ze"
  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
		   (const_int 16)
		   (const_int 16))
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_HAVE_MOVT"
  "@
   movt%?\t%0, %L1
   movt\t%0, %L1"
 [(set_attr "arch" "32,v8mb")
  (set_attr "predicable" "yes")
  (set_attr "length" "4")
  (set_attr "type" "alu_sreg")]
)

(define_insn "*arm_rev"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
	(bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
  "arm_arch6"
  "@
   rev\t%0, %1
   rev%?\t%0, %1
   rev%?\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")
   (set_attr "predicable" "no,yes,yes")
   (set_attr "type" "rev")]
)

(define_expand "arm_legacy_rev"
  [(set (match_operand:SI 2 "s_register_operand")
	(xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
			     (const_int 16))
		(match_dup 1)))
   (set (match_dup 2)
	(lshiftrt:SI (match_dup 2)
		     (const_int 8)))
   (set (match_operand:SI 3 "s_register_operand")
	(rotatert:SI (match_dup 1)
		     (const_int 8)))
   (set (match_dup 2)
	(and:SI (match_dup 2)
		(const_int -65281)))
   (set (match_operand:SI 0 "s_register_operand")
	(xor:SI (match_dup 3)
		(match_dup 2)))]
  "TARGET_32BIT"
  ""
)

;; Reuse temporaries to keep register pressure down.
(define_expand "thumb_legacy_rev"
  [(set (match_operand:SI 2 "s_register_operand")
	(ashift:SI (match_operand:SI 1 "s_register_operand")
		   (const_int 24)))
   (set (match_operand:SI 3 "s_register_operand")
	(lshiftrt:SI (match_dup 1)
		     (const_int 24)))
   (set (match_dup 3)
	(ior:SI (match_dup 3)
		(match_dup 2)))
   (set (match_operand:SI 4 "s_register_operand")
	(const_int 16))
   (set (match_operand:SI 5 "s_register_operand")
	(rotatert:SI (match_dup 1)
		     (match_dup 4)))
   (set (match_dup 2)
	(ashift:SI (match_dup 5)
		   (const_int 24)))
   (set (match_dup 5)
	(lshiftrt:SI (match_dup 5)
		     (const_int 24)))
   (set (match_dup 5)
	(ior:SI (match_dup 5)
		(match_dup 2)))
   (set (match_dup 5)
	(rotatert:SI (match_dup 5)
		     (match_dup 4)))
   (set (match_operand:SI 0 "s_register_operand")
	(ior:SI (match_dup 5)
		(match_dup 3)))]
  "TARGET_THUMB"
  ""
)

;; ARM-specific expansion of signed mod by power of 2
;; using conditional negate.
;; For r0 % n where n is a power of 2 produce:
;; rsbs    r1, r0, #0
;; and     r0, r0, #(n - 1)
;; and     r1, r1, #(n - 1)
;; rsbpl   r0, r1, #0

(define_expand "modsi3"
  [(match_operand:SI 0 "register_operand")
   (match_operand:SI 1 "register_operand")
   (match_operand:SI 2 "const_int_operand")]
  "TARGET_32BIT"
  {
    HOST_WIDE_INT val = INTVAL (operands[2]);

    if (val <= 0
       || exact_log2 (val) <= 0)
      FAIL;

    rtx mask = GEN_INT (val - 1);

    /* In the special case of x0 % 2 we can do the even shorter:
       cmp     r0, #0
       and     r0, r0, #1
       rsblt   r0, r0, #0.  */

    if (val == 2)
      {
	rtx cc_reg = arm_gen_compare_reg (LT,
					  operands[1], const0_rtx, NULL_RTX);
	rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
	rtx masked = gen_reg_rtx (SImode);

	emit_insn (gen_andsi3 (masked, operands[1], mask));
	emit_move_insn (operands[0],
			gen_rtx_IF_THEN_ELSE (SImode, cond,
					      gen_rtx_NEG (SImode,
							   masked),
					      masked));
	DONE;
      }

    rtx neg_op = gen_reg_rtx (SImode);
    rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
						      operands[1]));

    /* Extract the condition register and mode.  */
    rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
    rtx cc_reg = SET_DEST (cmp);
    rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);

    emit_insn (gen_andsi3 (operands[0], operands[1], mask));

    rtx masked_neg = gen_reg_rtx (SImode);
    emit_insn (gen_andsi3 (masked_neg, neg_op, mask));

    /* We want a conditional negate here, but emitting COND_EXEC rtxes
       during expand does not always work.  Do an IF_THEN_ELSE instead.  */
    emit_move_insn (operands[0],
		    gen_rtx_IF_THEN_ELSE (SImode, cond,
					  gen_rtx_NEG (SImode, masked_neg),
					  operands[0]));


    DONE;
  }
)

(define_expand "bswapsi2"
  [(set (match_operand:SI 0 "s_register_operand")
	(bswap:SI (match_operand:SI 1 "s_register_operand")))]
"TARGET_EITHER && (arm_arch6 || !optimize_size)"
"
    if (!arm_arch6)
      {
	rtx op2 = gen_reg_rtx (SImode);
	rtx op3 = gen_reg_rtx (SImode);

	if (TARGET_THUMB)
	  {
	    rtx op4 = gen_reg_rtx (SImode);
	    rtx op5 = gen_reg_rtx (SImode);

	    emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
					     op2, op3, op4, op5));
	  }
	else
	  {
	    emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
					   op2, op3));
	  }

	DONE;
      }
  "
)

;; bswap16 patterns: use revsh and rev16 instructions for the signed
;; and unsigned variants, respectively.  For rev16, expose
;; byte-swapping in the lower 16 bits only.
(define_insn "*arm_revsh"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
	(sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
  "arm_arch6"
  "@
  revsh\t%0, %1
  revsh%?\t%0, %1
  revsh%?\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")
   (set_attr "type" "rev")]
)

(define_insn "*arm_rev16"
  [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
	(bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
  "arm_arch6"
  "@
   rev16\t%0, %1
   rev16%?\t%0, %1
   rev16%?\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")
   (set_attr "type" "rev")]
)

;; There are no canonicalisation rules for the position of the lshiftrt, ashift
;; operations within an IOR/AND RTX, therefore we have two patterns matching
;; each valid permutation.

(define_insn "arm_rev16si2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,r")
	(ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
				   (const_int 8))
			(match_operand:SI 3 "const_int_operand" "n,n,n"))
		(and:SI (lshiftrt:SI (match_dup 1)
				     (const_int 8))
			(match_operand:SI 2 "const_int_operand" "n,n,n"))))]
  "arm_arch6
   && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
   && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
  "rev16\\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")
   (set_attr "type" "rev")]
)

(define_insn "arm_rev16si2_alt"
  [(set (match_operand:SI 0 "register_operand" "=l,l,r")
	(ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
				     (const_int 8))
			(match_operand:SI 2 "const_int_operand" "n,n,n"))
		(and:SI (ashift:SI (match_dup 1)
				   (const_int 8))
			(match_operand:SI 3 "const_int_operand" "n,n,n"))))]
  "arm_arch6
   && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
   && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
  "rev16\\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")
   (set_attr "type" "rev")]
)

(define_expand "bswaphi2"
  [(set (match_operand:HI 0 "s_register_operand")
	(bswap:HI (match_operand:HI 1 "s_register_operand")))]
"arm_arch6"
""
)

;; Patterns for LDRD/STRD in Thumb2 mode

(define_insn "*thumb2_ldrd"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
			 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
   (set (match_operand:SI 3 "s_register_operand" "=r")
	(mem:SI (plus:SI (match_dup 1)
			 (match_operand:SI 4 "const_int_operand" ""))))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
     && (operands_ok_ldrd_strd (operands[0], operands[3],
				  operands[1], INTVAL (operands[2]),
				  false, true))"
  "ldrd%?\t%0, %3, [%1, %2]"
  [(set_attr "type" "load_8")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_ldrd_base"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
   (set (match_operand:SI 2 "s_register_operand" "=r")
	(mem:SI (plus:SI (match_dup 1)
			 (const_int 4))))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && (operands_ok_ldrd_strd (operands[0], operands[2],
				  operands[1], 0, false, true))"
  "ldrd%?\t%0, %2, [%1]"
  [(set_attr "type" "load_8")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_ldrd_base_neg"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
			 (const_int -4))))
   (set (match_operand:SI 2 "s_register_operand" "=r")
	(mem:SI (match_dup 1)))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && (operands_ok_ldrd_strd (operands[0], operands[2],
				  operands[1], -4, false, true))"
  "ldrd%?\t%0, %2, [%1, #-4]"
  [(set_attr "type" "load_8")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_strd"
  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
			 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
	(match_operand:SI 2 "s_register_operand" "r"))
   (set (mem:SI (plus:SI (match_dup 0)
			 (match_operand:SI 3 "const_int_operand" "")))
	(match_operand:SI 4 "s_register_operand" "r"))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
     && (operands_ok_ldrd_strd (operands[2], operands[4],
				  operands[0], INTVAL (operands[1]),
				  false, false))"
  "strd%?\t%2, %4, [%0, %1]"
  [(set_attr "type" "store_8")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_strd_base"
  [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
	(match_operand:SI 1 "s_register_operand" "r"))
   (set (mem:SI (plus:SI (match_dup 0)
			 (const_int 4)))
	(match_operand:SI 2 "s_register_operand" "r"))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && (operands_ok_ldrd_strd (operands[1], operands[2],
				  operands[0], 0, false, false))"
  "strd%?\t%1, %2, [%0]"
  [(set_attr "type" "store_8")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_strd_base_neg"
  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
			 (const_int -4)))
	(match_operand:SI 1 "s_register_operand" "r"))
   (set (mem:SI (match_dup 0))
	(match_operand:SI 2 "s_register_operand" "r"))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && (operands_ok_ldrd_strd (operands[1], operands[2],
				  operands[0], -4, false, false))"
  "strd%?\t%1, %2, [%0, #-4]"
  [(set_attr "type" "store_8")
   (set_attr "predicable" "yes")])

;; ARMv8 CRC32 instructions.
(define_insn "arm_<crc_variant>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
		    (match_operand:<crc_mode> 2 "s_register_operand" "r")]
	 CRC))]
  "TARGET_CRC32"
  "<crc_variant>\\t%0, %1, %2"
  [(set_attr "type" "crc")
   (set_attr "conds" "unconditional")]
)

;; Load the load/store double peephole optimizations.
(include "ldrdstrd.md")

;; Load the load/store multiple patterns
(include "ldmstm.md")

;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern
;; covers large lists without explicit writeback generated for APCS_FRAME
;; epilogue.  The operands are validated through the load_multiple_operation
;; match_parallel predicate rather than through constraints so enable it only
;; after reload.
(define_insn "*load_multiple"
  [(match_parallel 0 "load_multiple_operation"
    [(set (match_operand:SI 2 "s_register_operand" "=rk")
	  (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
    ])]
  "TARGET_32BIT && reload_completed"
  "*
  {
    arm_output_multireg_pop (operands, /*return_pc=*/false,
			     /*cond=*/const_true_rtx,
			     /*reverse=*/false,
			     /*update=*/false);
    return \"\";
  }
  "
  [(set_attr "predicable" "yes")]
)

(define_expand "copysignsf3"
  [(match_operand:SF 0 "register_operand")
   (match_operand:SF 1 "register_operand")
   (match_operand:SF 2 "register_operand")]
  "TARGET_SOFT_FLOAT && arm_arch_thumb2"
  "{
     emit_move_insn (operands[0], operands[2]);
     emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
		GEN_INT (31), GEN_INT (0),
		simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
     DONE;
  }"
)

(define_expand "copysigndf3"
  [(match_operand:DF 0 "register_operand")
   (match_operand:DF 1 "register_operand")
   (match_operand:DF 2 "register_operand")]
  "TARGET_SOFT_FLOAT && arm_arch_thumb2"
  "{
     rtx op0_low = gen_lowpart (SImode, operands[0]);
     rtx op0_high = gen_highpart (SImode, operands[0]);
     rtx op1_low = gen_lowpart (SImode, operands[1]);
     rtx op1_high = gen_highpart (SImode, operands[1]);
     rtx op2_high = gen_highpart (SImode, operands[2]);

     rtx scratch1 = gen_reg_rtx (SImode);
     rtx scratch2 = gen_reg_rtx (SImode);
     emit_move_insn (scratch1, op2_high);
     emit_move_insn (scratch2, op1_high);

     emit_insn(gen_rtx_SET(scratch1,
			   gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))));
     emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
     emit_move_insn (op0_low, op1_low);
     emit_move_insn (op0_high, scratch2);

     DONE;
  }"
)

;; movmisalign patterns for HImode and SImode.
(define_expand "movmisalign<mode>"
  [(match_operand:HSI 0 "general_operand")
   (match_operand:HSI 1 "general_operand")]
  "unaligned_access"
{
  /* This pattern is not permitted to fail during expansion: if both arguments
     are non-registers (e.g. memory := constant), force operand 1 into a
     register.  */
  rtx (* gen_unaligned_load)(rtx, rtx);
  rtx tmp_dest = operands[0];
  if (!s_register_operand (operands[0], <MODE>mode)
      && !s_register_operand (operands[1], <MODE>mode))
    operands[1] = force_reg (<MODE>mode, operands[1]);

  if (<MODE>mode == HImode)
   {
    gen_unaligned_load = gen_unaligned_loadhiu;
    tmp_dest = gen_reg_rtx (SImode);
   }
  else
    gen_unaligned_load = gen_unaligned_loadsi;

  if (MEM_P (operands[1]))
   {
    emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
    if (<MODE>mode == HImode)
      emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
   }
  else
    emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));

  DONE;
})

(define_insn "arm_<cdp>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "immediate_operand" "n")
		     (match_operand:SI 3 "immediate_operand" "n")
		     (match_operand:SI 4 "immediate_operand" "n")
		     (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
  "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, 16);
  arm_const_bounds (operands[2], 0, (1 << 5));
  arm_const_bounds (operands[3], 0, (1 << 5));
  arm_const_bounds (operands[4], 0, (1 << 5));
  arm_const_bounds (operands[5], 0, 8);
  return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "*ldc"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
  "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, (1 << 5));
  return "<ldc>\\tp%c0, CR%c1, %2";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "*stc"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
  "arm_coproc_builtin_available (VUNSPEC_<STC>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, (1 << 5));
  return "<stc>\\tp%c0, CR%c1, %2";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_expand "arm_<ldc>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
		     (match_operand:SI 1 "immediate_operand")
		     (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
  "arm_coproc_builtin_available (VUNSPEC_<LDC>)")

(define_expand "arm_<stc>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
		     (match_operand:SI 1 "immediate_operand")
		     (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
  "arm_coproc_builtin_available (VUNSPEC_<STC>)")

(define_insn "arm_<mcr>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "s_register_operand" "r")
		     (match_operand:SI 3 "immediate_operand" "n")
		     (match_operand:SI 4 "immediate_operand" "n")
		     (match_operand:SI 5 "immediate_operand" "n")] MCRI)
   (use (match_dup 2))]
  "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  arm_const_bounds (operands[4], 0, (1 << 5));
  arm_const_bounds (operands[5], 0, 8);
  return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "arm_<mrc>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
			     (match_operand:SI 2 "immediate_operand" "n")
			     (match_operand:SI 3 "immediate_operand" "n")
			     (match_operand:SI 4 "immediate_operand" "n")
			     (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
  "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
{
  arm_const_bounds (operands[1], 0, 16);
  arm_const_bounds (operands[2], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  arm_const_bounds (operands[4], 0, (1 << 5));
  arm_const_bounds (operands[5], 0, 8);
  return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "arm_<mcrr>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:DI 2 "s_register_operand" "r")
		     (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
   (use (match_dup 2))]
  "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "arm_<mrrc>"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
			     (match_operand:SI 2 "immediate_operand" "n")
			     (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
  "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
{
  arm_const_bounds (operands[1], 0, 16);
  arm_const_bounds (operands[2], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_expand "speculation_barrier"
  [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
  "TARGET_EITHER"
  "
  /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
     have a usable barrier (and probably don't need one in practice).
     But to be safe if such code is run on later architectures, call a
     helper function in libgcc that will do the thing for the active
     system.  */
  if (!(arm_arch7 || arm_arch8))
    {
      arm_emit_speculation_barrier_function ();
      DONE;
    }
  "
)

;; Generate a hard speculation barrier when we have not enabled speculation
;; tracking.
(define_insn "*speculation_barrier_insn"
  [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
  "arm_arch7 || arm_arch8"
  "isb\;dsb\\tsy"
  [(set_attr "type" "block")
   (set_attr "length" "8")]
)

;; Vector bits common to IWMMXT, Neon and MVE
(include "vec-common.md")
;; Load the Intel Wireless Multimedia Extension patterns
(include "iwmmxt.md")
;; Load the VFP co-processor patterns
(include "vfp.md")
;; Thumb-1 patterns
(include "thumb1.md")
;; Thumb-2 patterns
(include "thumb2.md")
;; Neon patterns
(include "neon.md")
;; Crypto patterns
(include "crypto.md")
;; Synchronization Primitives
(include "sync.md")
;; Fixed-point patterns
(include "arm-fixed.md")
;; M-profile Vector Extension
(include "mve.md")