;; ARM Thumb-1 Machine Description
;; Copyright (C) 2007-2020 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.  */


;;---------------------------------------------------------------------------
;; Insn patterns
;;

;; Beware of splitting Thumb1 patterns that output multiple
;; assembly instructions, in particular instructions such as SBC and
;; ADC which consume flags.  For example, in the pattern thumb_subdi3
;; below, the output SUB implicitly sets the flags (assembled to SUBS)
;; and then the Carry flag is used by SBC to compute the correct
;; result.  If we split the thumb_subdi3 pattern into two separate RTL
;; insns (using define_insn_and_split), the scheduler might place
;; other RTL insns between SUB and SBC, possibly modifying the Carry
;; flag used by SBC.  This might happen because most Thumb1 patterns
;; for flag-setting instructions do not have explicit RTL for setting
;; or clobbering the flags.  Instead, they have the attribute "conds"
;; with value "set" or "clob".  However, this attribute is not used to
;; identify dependencies and therefore the scheduler might reorder
;; these instructions.  Currently, this problem cannot happen because
;; there are no separate Thumb1 patterns for individual instructions
;; that consume flags (except conditional execution, which is treated
;; differently).  In particular, there is no Thumb1 armv6-m pattern for
;; sbc or adc.
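
;; As a purely illustrative sketch of the hazard described above
;; (registers chosen arbitrarily), thumb_subdi3 emits:
;;
;;	subs	r0, r0, r2	@ low word; sets C (borrow)
;;	sbcs	r1, r1, r3	@ high word; consumes C
;;
;; If another flag-setting instruction (say "adds r4, r4, #1") were
;; scheduled between the two, the carry consumed by SBCS would no
;; longer describe the low-word subtraction and the result would be
;; wrong.  Keeping such sequences inside a single pattern avoids this.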



(define_insn "thumb1_movsi_symbol_ref"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(match_operand:SI 1 "general_operand" ""))
   ]
  "TARGET_THUMB1
   && arm_disable_literal_pool
   && GET_CODE (operands[1]) == SYMBOL_REF"
  "*
  output_asm_insn (\"movs\\t%0, #:upper8_15:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:upper0_7:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:lower8_15:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:lower0_7:%1\", operands);
  return \"\";
  "
  [(set_attr "length" "14")
   (set_attr "conds" "clob")]
)
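
;; For a hypothetical symbol "foo" placed in r0 (register and symbol
;; names chosen only for illustration), the pattern above emits the
;; 14-byte sequence
;;
;;	movs	r0, #:upper8_15:foo
;;	lsls	r0, #8
;;	adds	r0, #:upper0_7:foo
;;	lsls	r0, #8
;;	adds	r0, #:lower8_15:foo
;;	lsls	r0, #8
;;	adds	r0, #:lower0_7:foo
;;
;; i.e. the address is built one byte at a time so that no literal
;; pool entry is needed.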

(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "immediate_operand" ""))]
  "TARGET_THUMB1
   && arm_disable_literal_pool
   && GET_CODE (operands[1]) == CONST_INT
   && !TARGET_HAVE_MOVT
   && !satisfies_constraint_I (operands[1])"
  [(clobber (const_int 0))]
  "
    thumb1_gen_const_int (operands[0], INTVAL (operands[1]));
    DONE;
  "
)

(define_insn "*thumb1_adddi3"
  [(set (match_operand:DI          0 "register_operand" "=l")
	(plus:DI (match_operand:DI 1 "register_operand" "%0")
		 (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))
  ]
  "TARGET_THUMB1"
  "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic additions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn_and_split "*thumb1_addsi3"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
  "TARGET_THUMB1"
  "*
   static const char * const asms[] =
   {
     \"adds\\t%0, %0, %2\",
     \"subs\\t%0, %0, #%n2\",
     \"adds\\t%0, %1, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %1, %2\",
     \"add\\t%0, %1, %2\",
     \"#\",
     \"#\",
     \"#\"
   };
   if ((which_alternative == 2 || which_alternative == 6)
       && CONST_INT_P (operands[2])
       && INTVAL (operands[2]) < 0)
     return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
   return asms[which_alternative];
  "
  "&& reload_completed && CONST_INT_P (operands[2])
   && ((operands[1] != stack_pointer_rtx
        && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
       || (operands[1] == stack_pointer_rtx
	   && INTVAL (operands[2]) > 1020))"
  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
  {
    HOST_WIDE_INT offset = INTVAL (operands[2]);
    if (operands[1] == stack_pointer_rtx)
      offset -= 1020;
    else
      {
        if (offset > 255)
	  offset = 255;
	else if (offset < -255)
	  offset = -255;
      }
    operands[3] = GEN_INT (offset);
    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
  }
  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
		     alus_sreg,alus_sreg,multiple,multiple,multiple")]
)
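
;; As a worked example of the split above (registers chosen
;; arbitrarily), adding the out-of-range immediate 400 to a low
;; register that is also the destination becomes two in-range adds:
;;
;;	adds	r0, r0, #145	@ 400 - 255
;;	adds	r0, r0, #255
;;
;; and, for the stack pointer, e.g. SP + 1100 becomes:
;;
;;	add	r3, sp, #1020
;;	adds	r3, r3, #80	@ 1100 - 1020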

;; Reloading and elimination of the frame pointer can
;; sometimes cause this optimization to be missed.
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))
   (set (match_dup 0)
	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
  "TARGET_THUMB1
   && UINTVAL (operands[1]) < 1024
   && (UINTVAL (operands[1]) & 3) == 0"
  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
  ""
)
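
;; E.g. (hypothetical register) the peephole above turns
;;
;;	movs	r3, #16
;;	add	r3, r3, sp
;;
;; into the single instruction "add r3, sp, #16".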

(define_insn "*thumb_subdi3"
  [(set (match_operand:DI           0 "register_operand" "=l")
	(minus:DI (match_operand:DI 1 "register_operand"  "0")
		  (match_operand:DI 2 "register_operand"  "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic subtractions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "thumb1_subsi3_insn"
  [(set (match_operand:SI           0 "register_operand" "=l")
	(minus:SI (match_operand:SI 1 "register_operand" "l")
		  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
  "TARGET_THUMB1"
  "subs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)

;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
;; 1 and 2 are the same, because reload will make operand 0 match
;; operand 1 without realizing that this conflicts with operand 2.  We fix
;; this by adding another alternative to match this case, and then `reload'
;; it ourselves.  This alternative must come first.
(define_insn "*thumb_mulsi3"
  [(set (match_operand:SI          0 "register_operand" "=&l,&l,&l")
	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
		 (match_operand:SI 2 "register_operand" "l,l,l")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   movs\\t%0, %1\;muls\\t%0, %2
   mov\\t%0, %1\;muls\\t%0, %2
   muls\\t%0, %2"
  [(set_attr "length" "4,4,2")
   (set_attr "type" "muls")]
)

(define_insn "*thumb_mulsi3_v6"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l")
	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
		 (match_operand:SI 2 "register_operand" "l,0,0")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   muls\\t%0, %2
   muls\\t%0, %1
   muls\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "muls")]
)

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
;; in arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_andsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(and:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "ands\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "type"  "logic_imm")
   (set_attr "conds" "set")])

(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_THUMB1"
  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
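
;; E.g. (registers chosen arbitrarily) extracting an 8-bit unsigned
;; field whose least significant bit is bit 4 becomes, via the split
;; above, a shift pair:
;;
;;	lsls	r2, r1, #20	@ 32 - 8 - 4
;;	lsrs	r0, r2, #24	@ 32 - 8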

(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))]
  "TARGET_THUMB1"
  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)

(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI                 0 "register_operand" "=l")
	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
		(match_operand:SI         2 "register_operand" "0")))]
  "TARGET_THUMB1"
  "bics\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(ior:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "orrs\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")])

;; Changes to the constraints of this pattern must be propagated to those of
;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(xor:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "eors\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)

(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI            0 "register_operand" "=l,l")
	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsls\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "asrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])

(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l")
	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rors\\t%0, %0, %2"
  [(set_attr "type" "shift_reg")
   (set_attr "length" "2")]
)

(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI 0 "register_operand" "=&l")
	(neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)

(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rsbs\\t%0, %1, #0"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm")]
)

(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)

(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)

(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(not:SI (match_operand:SI 1 "register_operand"  "l")))]
  "TARGET_THUMB1"
  "mvns\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "mvn_reg")]
)

(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1"
{
  rtx mem;

  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
    return "#";

  mem = XEXP (operands[1], 0);

  if (GET_CODE (mem) == CONST)
    mem = XEXP (mem, 0);

  if (GET_CODE (mem) == PLUS)
    {
      rtx a = XEXP (mem, 0);

      /* This can happen due to bugs in reload.  */
      if (REG_P (a) && REGNO (a) == SP_REGNUM)
        {
          rtx ops[2];
          ops[0] = operands[0];
          ops[1] = a;

          output_asm_insn ("mov\t%0, %1", ops);

          XEXP (mem, 0) = operands[0];
        }
    }

  return "ldrh\t%0, %1";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")]
)

(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   #
   ldrb\\t%0, %1"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift_reg,load_byte")
   (set_attr "pool_range" "*,32")]
)

(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   uxtb\\t%0, %1
   ldrb\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "extend,load_byte")]
)

;; We used to have an early-clobber on the scratch register here.
;; However, there's a bug somewhere in reload which means that this
;; can be partially ignored during spill allocation if the memory
;; address also needs reloading; this causes us to die later on when
;; we try to verify the operands.  Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address.
(define_insn "thumb1_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
   (clobber (match_scratch:SI 2 "=X,l"))]
  "TARGET_THUMB1"
  "*
  {
    rtx ops[4];
    rtx mem;

    if (which_alternative == 0 && !arm_arch6)
      return \"#\";
    if (which_alternative == 0)
      return \"sxth\\t%0, %1\";

    mem = XEXP (operands[1], 0);

    /* This code used to try to use 'V', and fix the address only if it was
       offsettable, but this fails for e.g. REG+48 because 48 is outside the
       range of QImode offsets, and offsettable_address_p does a QImode
       address check.  */

    if (GET_CODE (mem) == CONST)
      mem = XEXP (mem, 0);

    if (GET_CODE (mem) == LABEL_REF)
      return \"ldr\\t%0, %1\";

    if (GET_CODE (mem) == PLUS)
      {
        rtx a = XEXP (mem, 0);
        rtx b = XEXP (mem, 1);

        if (GET_CODE (a) == LABEL_REF
	    && CONST_INT_P (b))
          return \"ldr\\t%0, %1\";

        if (REG_P (b))
          return \"ldrsh\\t%0, %1\";

        ops[1] = a;
        ops[2] = b;
      }
    else
      {
        ops[1] = mem;
        ops[2] = const0_rtx;
      }

    gcc_assert (REG_P (ops[1]));

    ops[0] = operands[0];
    if (reg_mentioned_p (operands[2], ops[1]))
      ops[3] = ops[0];
    else
      ops[3] = operands[2];
    output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
    return \"\";
  }"
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")
   (set_attr "pool_range" "*,1018")]
)

(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
{
  rtx addr = XEXP (operands[1], 0);

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    /* No split necessary.  */
    FAIL;

  if (GET_CODE (addr) == PLUS
      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
    FAIL;

  if (reg_overlap_mentioned_p (operands[0], addr))
    {
      rtx t = gen_lowpart (QImode, operands[0]);
      emit_move_insn (t, operands[1]);
      emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
      DONE;
    }

  if (REG_P (addr))
    {
      addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
      operands[2] = const0_rtx;
    }
  else if (GET_CODE (addr) != PLUS)
    FAIL;
  else if (REG_P (XEXP (addr, 0)))
    {
      operands[2] = XEXP (addr, 1);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
    }
  else
    {
      operands[2] = XEXP (addr, 0);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
    }

  operands[3] = change_address (operands[1], QImode, addr);
})

(define_peephole2
  [(set (match_operand:SI 0 "register_operand" "")
	(plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
   (set (match_operand:SI 3 "register_operand" "")
	(sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
  "TARGET_THUMB1
   && GET_CODE (XEXP (operands[4], 0)) == PLUS
   && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
   && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
   && (peep2_reg_dead_p (3, operands[0])
       || rtx_equal_p (operands[0], operands[3]))
   && (peep2_reg_dead_p (3, operands[2])
       || rtx_equal_p (operands[2], operands[3]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
{
  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
  operands[4] = change_address (operands[4], QImode, addr);
})

(define_insn "thumb1_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
  "TARGET_THUMB1"
{
  rtx addr;

  if (which_alternative == 0 && arm_arch6)
    return "sxtb\\t%0, %1";
  if (which_alternative == 0)
    return "#";

  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    return "ldrsb\\t%0, %1";

  return "#";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 2)
			  (if_then_else (eq_attr "is_arch6" "yes")
					(const_int 4) (const_int 6))])
   (set_attr "type" "extend,load_byte,load_byte")]
)

;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
(define_insn "*thumb1_movdi_insn"
  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
	(match_operand:DI 1 "general_operand"      "l, I,J,j,>,l,mi,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode))"
  "*
  {
  switch (which_alternative)
    {
    default:
    case 0:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"add\\t%0,  %1,  #0\;add\\t%H0, %H1, #0\";
      return   \"add\\t%H0, %H1, #0\;add\\t%0,  %1,  #0\";
    case 1:
      return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
    case 2:
      operands[1] = GEN_INT (- INTVAL (operands[1]));
      return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
    case 3:
      gcc_assert (TARGET_HAVE_MOVT);
      return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
    case 4:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 5:
      return \"stmia\\t%0, {%1, %H1}\";
    case 6:
      return thumb_load_double_from_address (operands);
    case 7:
      operands[2] = gen_rtx_MEM (SImode,
			     plus_constant (Pmode, XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 8:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  }"
  [(set_attr "length" "4,4,6,6,2,2,6,4,4")
   (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
)

(define_insn "*thumb1_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, l, m,*l*h*k")
	(match_operand:SI 1 "general_operand"      "l, I,j,J,K,>,l,i, mi,l,*l*h*k"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SImode)
       || register_operand (operands[1], SImode))"
{
  switch (which_alternative)
    {
      default:
      case 0: return "movs\t%0, %1";
      case 1: return "movs\t%0, %1";
      case 2: return "movw\t%0, %1";
      case 3: return "#";
      case 4: return "#";
      case 5: return "ldmia\t%1, {%0}";
      case 6: return "stmia\t%0, {%1}";
      case 7:
      /* pure-code alternative: build the constant byte by byte,
	 instead of loading it from a constant pool.  */
	{
	  int i;
	  HOST_WIDE_INT op1 = INTVAL (operands[1]);
	  bool mov_done_p = false;
	  rtx ops[2];
	  ops[0] = operands[0];

	  /* Emit upper 3 bytes if needed.  */
	  for (i = 0; i < 3; i++)
	    {
	      int byte = (op1 >> (8 * (3 - i))) & 0xff;

	      if (byte)
		{
		  ops[1] = GEN_INT (byte);
		  if (mov_done_p)
		    output_asm_insn ("adds\t%0, %1", ops);
		  else
		    output_asm_insn ("movs\t%0, %1", ops);
		  mov_done_p = true;
		}

	      if (mov_done_p)
		output_asm_insn ("lsls\t%0, #8", ops);
	    }

	  /* Emit lower byte if needed.  */
	  ops[1] = GEN_INT (op1 & 0xff);
	  if (!mov_done_p)
	    output_asm_insn ("movs\t%0, %1", ops);
	  else if (op1 & 0xff)
	    output_asm_insn ("adds\t%0, %1", ops);
	  return "";
	}
      case 8: return "ldr\t%0, %1";
      case 9: return "str\t%1, %0";
      case 10: return "mov\t%0, %1";
    }
}
  [(set_attr "length" "2,2,4,4,4,2,2,14,2,2,2")
   (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,alu_sreg,load_4,store_4,mov_reg")
   (set_attr "pool_range" "*,*,*,*,*,*,*, *,1018,*,*")
   (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1,t1")
   (set_attr "required_for_purecode" "no,no,no,no,no,no,no,yes,no,no,no")
   (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,nocond,nocond,nocond,nocond")])
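
;; For the pure-code alternative (case 7) above, a constant such as
;; 0x12345678 (register and value chosen only for illustration) is
;; built byte by byte, giving the 14-byte sequence:
;;
;;	movs	r0, #0x12
;;	lsls	r0, #8
;;	adds	r0, #0x34
;;	lsls	r0, #8
;;	adds	r0, #0x56
;;	lsls	r0, #8
;;	adds	r0, #0x78
;;
;; Any byte that is zero is skipped, so smaller constants use fewer
;; instructions.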

; Split the load of a 64-bit constant into two loads for the high and low
; 32-bit parts respectively, to see if we can load them in fewer instructions
; or fewer cycles.
; For the small 64-bit integer constants that satisfy constraint J, the
; instruction pattern thumb1_movdi_insn has a better way to handle them.
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
       (match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
                                  operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);
  "
)

(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (neg:SI (match_dup 2)))]
  "
  {
    operands[1] = GEN_INT (- INTVAL (operands[1]));
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  }"
)
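
;; E.g. (hypothetical register, when no scratch pseudo is available)
;; the split above loads the negative constant -200 as
;;
;;	movs	r0, #200
;;	rsbs	r0, r0, #0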

(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
  "
  {
    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
    unsigned HOST_WIDE_INT mask = 0xff;
    int i;

    for (i = 0; i < 25; i++)
      if ((val & (mask << i)) == val)
        break;

    /* Don't split if the shift is zero.  */
    if (i == 0)
      FAIL;

    operands[1] = GEN_INT (val >> i);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (i);
  }"
)
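
;; E.g. (hypothetical register, assuming MOVW is not available) a
;; shifted-byte constant such as 0xFF00 is loaded by the split above
;; as a byte followed by a shift:
;;
;;	movs	r0, #255
;;	lsls	r0, r0, #8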

;; For Thumb-1, split an immediate move in the range [256-510] into a mov
;; of [1-255] followed by an add of #255.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
  "
  {
    operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (255);
  }"
)
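
;; E.g. (hypothetical register, when no scratch pseudo is available)
;; the constant 300 is loaded by the split above as
;;
;;	movs	r0, #45		@ 300 - 255
;;	adds	r0, r0, #255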

(define_insn "*thumb1_movhi_insn"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
	(match_operand:HI 1 "general_operand"       "l,m,l,k*h,*r,I,n"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], HImode)
       || register_operand (operands[1], HImode))"
  "*
  switch (which_alternative)
    {
    case 0: return \"adds	%0, %1, #0\";
    case 2: return \"strh	%1, %0\";
    case 3: return \"mov	%0, %1\";
    case 4: return \"mov	%0, %1\";
    case 5: return \"movs	%0, %1\";
    case 6: gcc_assert (TARGET_HAVE_MOVT);
	    return \"movw	%0, %L1\";
    default: gcc_unreachable ();
    case 1:
      /* The stack pointer can end up being taken as an index register.
         Catch this case here and deal with it.  */
      if (GET_CODE (XEXP (operands[1], 0)) == PLUS
	  && REG_P (XEXP (XEXP (operands[1], 0), 0))
	  && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
        {
	  rtx ops[2];
          ops[0] = operands[0];
          ops[1] = XEXP (XEXP (operands[1], 0), 0);

          output_asm_insn (\"mov	%0, %1\", ops);

          XEXP (XEXP (operands[1], 0), 0) = operands[0];

	}
      return \"ldrh	%0, %1\";
    }"
  [(set_attr "length" "2,4,2,2,2,2,4")
   (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm")
   (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])

(define_expand "thumb_movhi_clobber"
  [(set (match_operand:HI     0 "memory_operand")
	(match_operand:HI     1 "register_operand"))
   (clobber (match_operand:DI 2 "register_operand"))]
  "TARGET_THUMB1"
  "
  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
      && REGNO (operands[1]) <= LAST_LO_REGNUM)
    {
      emit_insn (gen_movhi (operands[0], operands[1]));
      DONE;
    }
  /* XXX Fixme, need to handle other cases here as well.  */
  gcc_unreachable ();
  "
)

(define_insn "*thumb1_movqi_insn"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
	(match_operand:QI 1 "general_operand"       "l,m,l,k*h,*r,I"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], QImode)
       || register_operand (operands[1], QImode))"
  "@
   adds\\t%0, %1, #0
   ldrb\\t%0, %1
   strb\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1
   movs\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm")
   (set_attr "pool_range" "*,32,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])

(define_insn "*thumb1_movhf"
  [(set (match_operand:HF     0 "nonimmediate_operand" "=l,l,l,m,*r,*h")
	(match_operand:HF     1 "general_operand"      "l, m,F,l,*h,*r"))]
  "TARGET_THUMB1
   && (	  s_register_operand (operands[0], HFmode)
       || s_register_operand (operands[1], HFmode))"
  "*
  switch (which_alternative)
    {
    case 0:
      return \"movs\\t%0, %1\";
    case 1:
      {
	rtx addr;
	gcc_assert (MEM_P (operands[1]));
	addr = XEXP (operands[1], 0);
	if (GET_CODE (addr) == LABEL_REF
	    || (GET_CODE (addr) == CONST
		&& GET_CODE (XEXP (addr, 0)) == PLUS
		&& GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
		&& CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
	  {
	    /* Constant pool entry.  */
	    return \"ldr\\t%0, %1\";
	  }
	return \"ldrh\\t%0, %1\";
      }
    case 2:
    {
      int bits;
      int high;
      rtx ops[3];

      bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
			     HFmode);
      ops[0] = operands[0];
      high = (bits >> 8) & 0xff;
      ops[1] = GEN_INT (high);
      ops[2] = GEN_INT (bits & 0xff);
      if (high != 0)
	output_asm_insn (\"movs\\t%0, %1\;lsls\\t%0, #8\;adds\\t%0, %2\", ops);
      else
	output_asm_insn (\"movs\\t%0, %2\", ops);

      return \"\";
    }
    case 3: return \"strh\\t%1, %0\";
    default: return \"mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "2,2,6,2,2,2")
   (set_attr "type" "mov_reg,load_4,mov_reg,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,1018,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond")])

;;; ??? This should have alternatives for constants.
(define_insn "*thumb1_movsf_insn"
  [(set (match_operand:SF     0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
	(match_operand:SF     1 "general_operand"      "l, >,l,mF,l,*h,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SFmode)
       || register_operand (operands[1], SFmode))"
  "@
   adds\\t%0, %1, #0
   ldmia\\t%1, {%0}
   stmia\\t%0, {%1}
   ldr\\t%0, %1
   str\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,*,*,1018,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
)

;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdi_insn pattern.
;;; ??? The 'F' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
(define_insn "*thumb_movdf_insn"
  [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
	(match_operand:DF 1 "general_operand"      "l, >,l,mF,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DFmode)
       || register_operand (operands[1], DFmode))"
  "*
  switch (which_alternative)
    {
    default:
    case 0:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
      return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
    case 1:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 2:
      return \"stmia\\t%0, {%1, %H1}\";
    case 3:
      return thumb_load_double_from_address (operands);
    case 4:
      operands[2] = gen_rtx_MEM (SImode,
				 plus_constant (Pmode,
						XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 5:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "4,2,2,6,4,4")
   (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "pool_range" "*,*,*,1018,*,*")]
)


;; Thumb block-move insns

(define_insn "cpymem12b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
	(mem:SI (plus:SI (match_dup 3) (const_int 8))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 12)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 12)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))
   (clobber (match_scratch:SI 6 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (3, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_12")]
)

(define_insn "cpymem8b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 8)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 8)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (2, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_8")]
)

;; A pattern to recognize a special situation and optimize for it.
;; On Thumb, zero-extension from memory is preferable to sign-extension
;; due to the available addressing modes.  Hence, convert a signed comparison
;; with zero into an unsigned comparison with 127 if possible.
(define_expand "cbranchqi4"
  [(set (pc) (if_then_else
	      (match_operator 0 "lt_ge_comparison_operator"
	       [(match_operand:QI 1 "memory_operand")
	        (match_operand:QI 2 "const0_operand")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_THUMB1"
{
  rtx xops[4];
  xops[1] = gen_reg_rtx (SImode);
  emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
  xops[2] = GEN_INT (127);
  xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
			    VOIDmode, xops[1], xops[2]);
  xops[3] = operands[3];
  emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
  DONE;
})
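
;; E.g. (registers and label chosen arbitrarily) a branch taken when a
;; signed byte in memory is >= 0 becomes
;;
;;	ldrb	r3, [r0]	@ zero-extending load
;;	cmp	r3, #127
;;	bls	.Ltarget	@ unsigned <= 127 <=> signed >= 0
;;
;; avoiding the more expensive sign-extending load sequence.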
109510d565efSmrg
109610d565efSmrg;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile,
109710d565efSmrg;; adapted from cbranchsi4_insn.  Modifying cbranchsi4_insn instead leads to
109810d565efSmrg;; code generation difference for ARMv6-M because the minimum length of the
109910d565efSmrg;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's
110010d565efSmrg;; handling of PC in the length condition.
110110d565efSmrg(define_insn "thumb1_cbz"
110210d565efSmrg  [(set (pc) (if_then_else
110310d565efSmrg	      (match_operator 0 "equality_operator"
110410d565efSmrg	       [(match_operand:SI 1 "s_register_operand" "l")
110510d565efSmrg		(const_int 0)])
110610d565efSmrg	      (label_ref (match_operand 2 "" ""))
110710d565efSmrg	      (pc)))]
110810d565efSmrg  "TARGET_THUMB1 && TARGET_HAVE_CBZ"
110910d565efSmrg{
111010d565efSmrg  if (get_attr_length (insn) == 2)
111110d565efSmrg    {
111210d565efSmrg      if (GET_CODE (operands[0]) == EQ)
111310d565efSmrg	return "cbz\t%1, %l2";
111410d565efSmrg      else
111510d565efSmrg	return "cbnz\t%1, %l2";
111610d565efSmrg    }
111710d565efSmrg  else
111810d565efSmrg    {
111910d565efSmrg      rtx t = cfun->machine->thumb1_cc_insn;
112010d565efSmrg      if (t != NULL_RTX)
112110d565efSmrg	{
112210d565efSmrg	  if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
112310d565efSmrg	      || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
112410d565efSmrg	    t = NULL_RTX;
1125*ec02198aSmrg	  if (cfun->machine->thumb1_cc_mode == CC_NZmode)
112610d565efSmrg	    {
1127*ec02198aSmrg	      if (!nz_comparison_operator (operands[0], VOIDmode))
112810d565efSmrg		t = NULL_RTX;
112910d565efSmrg	    }
113010d565efSmrg	  else if (cfun->machine->thumb1_cc_mode != CCmode)
113110d565efSmrg	    t = NULL_RTX;
113210d565efSmrg	}
113310d565efSmrg      if (t == NULL_RTX)
113410d565efSmrg	{
113510d565efSmrg	  output_asm_insn ("cmp\t%1, #0", operands);
113610d565efSmrg	  cfun->machine->thumb1_cc_insn = insn;
113710d565efSmrg	  cfun->machine->thumb1_cc_op0 = operands[1];
113810d565efSmrg	  cfun->machine->thumb1_cc_op1 = operands[2];
113910d565efSmrg	  cfun->machine->thumb1_cc_mode = CCmode;
114010d565efSmrg	}
114110d565efSmrg      else
114210d565efSmrg	/* Ensure we emit the right type of condition code on the jump.  */
114310d565efSmrg	XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
114410d565efSmrg					     CC_REGNUM);
114510d565efSmrg
114610d565efSmrg      switch (get_attr_length (insn))
114710d565efSmrg	{
114810d565efSmrg	case 4:  return "b%d0\t%l2";
114910d565efSmrg	case 6:  return "b%D0\t.LCB%=;b\t%l2\t%@long jump\n.LCB%=:";
115010d565efSmrg	case 8:  return "b%D0\t.LCB%=;bl\t%l2\t%@far jump\n.LCB%=:";
115110d565efSmrg	default: gcc_unreachable ();
115210d565efSmrg	}
115310d565efSmrg    }
115410d565efSmrg}
115510d565efSmrg  [(set (attr "far_jump")
115610d565efSmrg	(if_then_else
115710d565efSmrg	    (eq_attr "length" "8")
115810d565efSmrg	    (const_string "yes")
115910d565efSmrg	    (const_string "no")))
116010d565efSmrg   (set (attr "length")
116110d565efSmrg	(if_then_else
116210d565efSmrg	    (and (ge (minus (match_dup 2) (pc)) (const_int 2))
116310d565efSmrg		 (le (minus (match_dup 2) (pc)) (const_int 128)))
116410d565efSmrg	    (const_int 2)
116510d565efSmrg	    (if_then_else
116610d565efSmrg		(and (ge (minus (match_dup 2) (pc)) (const_int -250))
116710d565efSmrg		     (le (minus (match_dup 2) (pc)) (const_int 256)))
116810d565efSmrg		(const_int 4)
116910d565efSmrg		(if_then_else
117010d565efSmrg		    (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
117110d565efSmrg			 (le (minus (match_dup 2) (pc)) (const_int 2048)))
117210d565efSmrg		    (const_int 6)
117310d565efSmrg		    (const_int 8)))))
117410d565efSmrg   (set (attr "type")
117510d565efSmrg	(if_then_else
117610d565efSmrg	    (eq_attr "length" "2")
117710d565efSmrg	    (const_string "branch")
117810d565efSmrg	    (const_string "multiple")))]
117910d565efSmrg)
118010d565efSmrg
118110d565efSmrg;; Changes to the constraints of this pattern must be propagated to those of
118210d565efSmrg;; atomic compare_and_swap splitters in sync.md.  These must be at least as
118310d565efSmrg;; strict as the constraints here, while aiming to be as permissive as possible.
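;; As in thumb1_cbz above, cfun->machine->thumb1_cc_insn records the last
;; flag-setting comparison, so the cmp below can be omitted when the flags
;; already describe %1 against %2.  The branch is then emitted in one of
;; three forms depending on the distance to the label, for example:
;;   cmp r0, r1
;;   beq .Ltarget       @ 4 bytes, target within conditional-branch range
;; or, when the target is too far for a conditional branch:
;;   cmp r0, r1
;;   bne .LCB1
;;   b   .Ltarget       @ 6 bytes, "long jump"
;; .LCB1:
;; with a bl instead of the b for targets beyond the range of b (the
;; 8-byte "far jump" form).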
118410d565efSmrg(define_insn "cbranchsi4_insn"
118510d565efSmrg  [(set (pc) (if_then_else
118610d565efSmrg	      (match_operator 0 "arm_comparison_operator"
118710d565efSmrg	       [(match_operand:SI 1 "s_register_operand" "l,l*h")
118810d565efSmrg	        (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
118910d565efSmrg	      (label_ref (match_operand 3 "" ""))
119010d565efSmrg	      (pc)))]
119110d565efSmrg  "TARGET_THUMB1"
119210d565efSmrg{
119310d565efSmrg  rtx t = cfun->machine->thumb1_cc_insn;
119410d565efSmrg  if (t != NULL_RTX)
119510d565efSmrg    {
119610d565efSmrg      if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
119710d565efSmrg	  || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
119810d565efSmrg	t = NULL_RTX;
1199*ec02198aSmrg      if (cfun->machine->thumb1_cc_mode == CC_NZmode)
120010d565efSmrg	{
1201*ec02198aSmrg	  if (!nz_comparison_operator (operands[0], VOIDmode))
120210d565efSmrg	    t = NULL_RTX;
120310d565efSmrg	}
120410d565efSmrg      else if (cfun->machine->thumb1_cc_mode != CCmode)
120510d565efSmrg	t = NULL_RTX;
120610d565efSmrg    }
120710d565efSmrg  if (t == NULL_RTX)
120810d565efSmrg    {
120910d565efSmrg      output_asm_insn ("cmp\t%1, %2", operands);
121010d565efSmrg      cfun->machine->thumb1_cc_insn = insn;
121110d565efSmrg      cfun->machine->thumb1_cc_op0 = operands[1];
121210d565efSmrg      cfun->machine->thumb1_cc_op1 = operands[2];
121310d565efSmrg      cfun->machine->thumb1_cc_mode = CCmode;
121410d565efSmrg    }
121510d565efSmrg  else
121610d565efSmrg    /* Ensure we emit the right type of condition code on the jump.  */
121710d565efSmrg    XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
121810d565efSmrg					 CC_REGNUM);
121910d565efSmrg
122010d565efSmrg  switch (get_attr_length (insn))
122110d565efSmrg    {
122210d565efSmrg    case 4:  return \"b%d0\\t%l3\";
122310d565efSmrg    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
122410d565efSmrg    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
122510d565efSmrg    }
122610d565efSmrg}
122710d565efSmrg  [(set (attr "far_jump")
122810d565efSmrg        (if_then_else
122910d565efSmrg	    (eq_attr "length" "8")
123010d565efSmrg	    (const_string "yes")
123110d565efSmrg            (const_string "no")))
123210d565efSmrg   (set (attr "length")
123310d565efSmrg        (if_then_else
123410d565efSmrg	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
123510d565efSmrg	         (le (minus (match_dup 3) (pc)) (const_int 256)))
123610d565efSmrg	    (const_int 4)
123710d565efSmrg	    (if_then_else
123810d565efSmrg	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
123910d565efSmrg		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
124010d565efSmrg		(const_int 6)
124110d565efSmrg		(const_int 8))))
124210d565efSmrg   (set_attr "type" "multiple")]
124310d565efSmrg)
124410d565efSmrg
1245*ec02198aSmrg;; An expander which makes use of the cbranchsi4_scratch insn, but can
1246*ec02198aSmrg;; be used safely after RA.
1247*ec02198aSmrg(define_expand "cbranchsi4_neg_late"
1248*ec02198aSmrg  [(parallel [
1249*ec02198aSmrg     (set (pc) (if_then_else
1250*ec02198aSmrg		(match_operator 4 "arm_comparison_operator"
1251*ec02198aSmrg		 [(match_operand:SI 1 "s_register_operand")
1252*ec02198aSmrg		  (match_operand:SI 2 "thumb1_cmpneg_operand")])
1253*ec02198aSmrg		(label_ref (match_operand 3 "" ""))
1254*ec02198aSmrg		(pc)))
1255*ec02198aSmrg     (clobber (match_operand:SI 0 "s_register_operand"))
1256*ec02198aSmrg  ])]
1257*ec02198aSmrg  "TARGET_THUMB1"
1258*ec02198aSmrg)
1259*ec02198aSmrg
126010d565efSmrg;; Changes to the constraints of this pattern must be propagated to those of
126110d565efSmrg;; atomic compare_and_swap splitters in sync.md.  These must be at least as
126210d565efSmrg;; strict as the constraints here, while aiming to be as permissive as possible.
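;; Thumb-1 cmp cannot encode a negative immediate, so a comparison with a
;; negative constant (thumb1_cmpneg_operand) is done by adding the negated
;; constant into a scratch register; the adds computes the same 32-bit sum
;; the comparison would, and so sets the same flags.  For example,
;; comparing r0 with -5:
;;   adds r3, r0, #5    @ flags as for r0 - (-5)
;;   bge  .Ltarget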
126310d565efSmrg(define_insn "cbranchsi4_scratch"
126410d565efSmrg  [(set (pc) (if_then_else
126510d565efSmrg	      (match_operator 4 "arm_comparison_operator"
126610d565efSmrg	       [(match_operand:SI 1 "s_register_operand" "l,0")
126710d565efSmrg	        (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
126810d565efSmrg	      (label_ref (match_operand 3 "" ""))
126910d565efSmrg	      (pc)))
127010d565efSmrg   (clobber (match_scratch:SI 0 "=l,l"))]
127110d565efSmrg  "TARGET_THUMB1"
127210d565efSmrg  "*
127310d565efSmrg  output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);
127410d565efSmrg
127510d565efSmrg  switch (get_attr_length (insn))
127610d565efSmrg    {
127710d565efSmrg    case 4:  return \"b%d4\\t%l3\";
127810d565efSmrg    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
127910d565efSmrg    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
128010d565efSmrg    }
128110d565efSmrg  "
128210d565efSmrg  [(set (attr "far_jump")
128310d565efSmrg        (if_then_else
128410d565efSmrg	    (eq_attr "length" "8")
128510d565efSmrg	    (const_string "yes")
128610d565efSmrg            (const_string "no")))
128710d565efSmrg   (set (attr "length")
128810d565efSmrg        (if_then_else
128910d565efSmrg	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
129010d565efSmrg	         (le (minus (match_dup 3) (pc)) (const_int 256)))
129110d565efSmrg	    (const_int 4)
129210d565efSmrg	    (if_then_else
129310d565efSmrg	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
129410d565efSmrg		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
129510d565efSmrg		(const_int 6)
129610d565efSmrg		(const_int 8))))
129710d565efSmrg   (set_attr "type" "multiple")]
129810d565efSmrg)
129910d565efSmrg
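;; cmn adds its operands and sets the flags without writing a result, which
;; is exactly the comparison of %1 against -%2 required here, e.g.:
;;   cmn r0, r1         @ flags as for r0 - (-r1)
;;   beq .Ltarget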
130010d565efSmrg(define_insn "*negated_cbranchsi4"
130110d565efSmrg  [(set (pc)
130210d565efSmrg	(if_then_else
130310d565efSmrg	 (match_operator 0 "equality_operator"
130410d565efSmrg	  [(match_operand:SI 1 "s_register_operand" "l")
130510d565efSmrg	   (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
130610d565efSmrg	 (label_ref (match_operand 3 "" ""))
130710d565efSmrg	 (pc)))]
130810d565efSmrg  "TARGET_THUMB1"
130910d565efSmrg  "*
131010d565efSmrg  output_asm_insn (\"cmn\\t%1, %2\", operands);
131110d565efSmrg  switch (get_attr_length (insn))
131210d565efSmrg    {
131310d565efSmrg    case 4:  return \"b%d0\\t%l3\";
131410d565efSmrg    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
131510d565efSmrg    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
131610d565efSmrg    }
131710d565efSmrg  "
131810d565efSmrg  [(set (attr "far_jump")
131910d565efSmrg        (if_then_else
132010d565efSmrg	    (eq_attr "length" "8")
132110d565efSmrg	    (const_string "yes")
132210d565efSmrg            (const_string "no")))
132310d565efSmrg   (set (attr "length")
132410d565efSmrg        (if_then_else
132510d565efSmrg	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
132610d565efSmrg	         (le (minus (match_dup 3) (pc)) (const_int 256)))
132710d565efSmrg	    (const_int 4)
132810d565efSmrg	    (if_then_else
132910d565efSmrg	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
133010d565efSmrg		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
133110d565efSmrg		(const_int 6)
133210d565efSmrg		(const_int 8))))
133310d565efSmrg   (set_attr "type" "multiple")]
133410d565efSmrg)
133510d565efSmrg
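;; A single-bit test: the lsls below moves the selected bit into the most
;; significant (sign) position of the scratch register, so the conditional
;; branch only needs to look at the sign of the shifted value.  For example,
;; branching when bit 3 of r0 is set comes out along the lines of:
;;   lsls r3, r0, #28   @ bit 3 of r0 is now the sign bit of r3
;;   bmi  .Ltarget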
133610d565efSmrg(define_insn "*tbit_cbranch"
133710d565efSmrg  [(set (pc)
133810d565efSmrg	(if_then_else
133910d565efSmrg	 (match_operator 0 "equality_operator"
134010d565efSmrg	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
134110d565efSmrg			    (const_int 1)
134210d565efSmrg			    (match_operand:SI 2 "const_int_operand" "i"))
134310d565efSmrg	   (const_int 0)])
134410d565efSmrg	 (label_ref (match_operand 3 "" ""))
134510d565efSmrg	 (pc)))
134610d565efSmrg   (clobber (match_scratch:SI 4 "=l"))]
134710d565efSmrg  "TARGET_THUMB1"
134810d565efSmrg  "*
134910d565efSmrg  {
135010d565efSmrg  rtx op[3];
135110d565efSmrg  op[0] = operands[4];
135210d565efSmrg  op[1] = operands[1];
135310d565efSmrg  op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
135410d565efSmrg
135510d565efSmrg  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
135610d565efSmrg  switch (get_attr_length (insn))
135710d565efSmrg    {
135810d565efSmrg    case 4:  return \"b%d0\\t%l3\";
135910d565efSmrg    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
136010d565efSmrg    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
136110d565efSmrg    }
136210d565efSmrg  }"
136310d565efSmrg  [(set (attr "far_jump")
136410d565efSmrg        (if_then_else
136510d565efSmrg	    (eq_attr "length" "8")
136610d565efSmrg	    (const_string "yes")
136710d565efSmrg            (const_string "no")))
136810d565efSmrg   (set (attr "length")
136910d565efSmrg        (if_then_else
137010d565efSmrg	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
137110d565efSmrg	         (le (minus (match_dup 3) (pc)) (const_int 256)))
137210d565efSmrg	    (const_int 4)
137310d565efSmrg	    (if_then_else
137410d565efSmrg	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
137510d565efSmrg		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
137610d565efSmrg		(const_int 6)
137710d565efSmrg		(const_int 8))))
137810d565efSmrg   (set_attr "type" "multiple")]
137910d565efSmrg)
138010d565efSmrg
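;; Tests whether the low %2 bits of %1 are all zero: shifting left by
;; 32 - %2 discards every other bit, so the Z flag of the lsls reflects
;; exactly the bits under test.  For example, testing the low 12 bits of r0:
;;   lsls r3, r0, #20
;;   beq  .Ltarget      @ taken when bits 0..11 of r0 are all zero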
138110d565efSmrg(define_insn "*tlobits_cbranch"
138210d565efSmrg  [(set (pc)
138310d565efSmrg	(if_then_else
138410d565efSmrg	 (match_operator 0 "equality_operator"
138510d565efSmrg	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
138610d565efSmrg			    (match_operand:SI 2 "const_int_operand" "i")
138710d565efSmrg			    (const_int 0))
138810d565efSmrg	   (const_int 0)])
138910d565efSmrg	 (label_ref (match_operand 3 "" ""))
139010d565efSmrg	 (pc)))
139110d565efSmrg   (clobber (match_scratch:SI 4 "=l"))]
139210d565efSmrg  "TARGET_THUMB1"
139310d565efSmrg  "*
139410d565efSmrg  {
139510d565efSmrg  rtx op[3];
139610d565efSmrg  op[0] = operands[4];
139710d565efSmrg  op[1] = operands[1];
139810d565efSmrg  op[2] = GEN_INT (32 - INTVAL (operands[2]));
139910d565efSmrg
140010d565efSmrg  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
140110d565efSmrg  switch (get_attr_length (insn))
140210d565efSmrg    {
140310d565efSmrg    case 4:  return \"b%d0\\t%l3\";
140410d565efSmrg    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
140510d565efSmrg    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
140610d565efSmrg    }
140710d565efSmrg  }"
140810d565efSmrg  [(set (attr "far_jump")
140910d565efSmrg        (if_then_else
141010d565efSmrg	    (eq_attr "length" "8")
141110d565efSmrg	    (const_string "yes")
141210d565efSmrg            (const_string "no")))
141310d565efSmrg   (set (attr "length")
141410d565efSmrg        (if_then_else
141510d565efSmrg	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
141610d565efSmrg	         (le (minus (match_dup 3) (pc)) (const_int 256)))
141710d565efSmrg	    (const_int 4)
141810d565efSmrg	    (if_then_else
141910d565efSmrg	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
142010d565efSmrg		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
142110d565efSmrg		(const_int 6)
142210d565efSmrg		(const_int 8))))
142310d565efSmrg   (set_attr "type" "multiple")]
142410d565efSmrg)
142510d565efSmrg
142610d565efSmrg(define_insn "*tstsi3_cbranch"
142710d565efSmrg  [(set (pc)
142810d565efSmrg	(if_then_else
142910d565efSmrg	 (match_operator 3 "equality_operator"
143010d565efSmrg	  [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
143110d565efSmrg		   (match_operand:SI 1 "s_register_operand" "l"))
143210d565efSmrg	   (const_int 0)])
143310d565efSmrg	 (label_ref (match_operand 2 "" ""))
143410d565efSmrg	 (pc)))]
143510d565efSmrg  "TARGET_THUMB1"
143610d565efSmrg  "*
143710d565efSmrg  {
143810d565efSmrg  output_asm_insn (\"tst\\t%0, %1\", operands);
143910d565efSmrg  switch (get_attr_length (insn))
144010d565efSmrg    {
144110d565efSmrg    case 4:  return \"b%d3\\t%l2\";
144210d565efSmrg    case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
144310d565efSmrg    default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
144410d565efSmrg    }
144510d565efSmrg  }"
144610d565efSmrg  [(set (attr "far_jump")
144710d565efSmrg        (if_then_else
144810d565efSmrg	    (eq_attr "length" "8")
144910d565efSmrg	    (const_string "yes")
145010d565efSmrg            (const_string "no")))
145110d565efSmrg   (set (attr "length")
145210d565efSmrg        (if_then_else
145310d565efSmrg	    (and (ge (minus (match_dup 2) (pc)) (const_int -250))
145410d565efSmrg	         (le (minus (match_dup 2) (pc)) (const_int 256)))
145510d565efSmrg	    (const_int 4)
145610d565efSmrg	    (if_then_else
145710d565efSmrg	        (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
145810d565efSmrg		     (le (minus (match_dup 2) (pc)) (const_int 2048)))
145910d565efSmrg		(const_int 6)
146010d565efSmrg		(const_int 8))))
146110d565efSmrg   (set_attr "type" "multiple")]
146210d565efSmrg)
146310d565efSmrg
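;; Decrement-and-branch: the subs that performs the decrement also sets the
;; flags for %2 - 1, so the original test of %2 against zero is recovered
;; from the carry flag; %2 != 0 is the unsigned %2 >= 1 (carry set) and
;; %2 == 0 is %2 < 1 (carry clear), which is why the condition is rewritten
;; to GEU/LTU in the output code below.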
146410d565efSmrg(define_insn "*cbranchne_decr1"
146510d565efSmrg  [(set (pc)
146610d565efSmrg	(if_then_else (match_operator 3 "equality_operator"
146710d565efSmrg		       [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
146810d565efSmrg		        (const_int 0)])
146910d565efSmrg		      (label_ref (match_operand 4 "" ""))
147010d565efSmrg		      (pc)))
147110d565efSmrg   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
147210d565efSmrg	(plus:SI (match_dup 2) (const_int -1)))
147310d565efSmrg   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
147410d565efSmrg  "TARGET_THUMB1"
147510d565efSmrg  "*
147610d565efSmrg   {
147710d565efSmrg     rtx cond[2];
147810d565efSmrg     cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
147910d565efSmrg				? GEU : LTU),
148010d565efSmrg			       VOIDmode, operands[2], const1_rtx);
148110d565efSmrg     cond[1] = operands[4];
148210d565efSmrg
148310d565efSmrg     if (which_alternative == 0)
148410d565efSmrg       output_asm_insn (\"subs\\t%0, %2, #1\", operands);
148510d565efSmrg     else if (which_alternative == 1)
148610d565efSmrg       {
148710d565efSmrg	 /* We must provide an alternative for a hi reg because reload
148810d565efSmrg	    cannot handle output reloads on a jump instruction, but we
148910d565efSmrg	    can't subtract into that.  Fortunately a mov from lo to hi
149010d565efSmrg	    does not clobber the condition codes.  */
149110d565efSmrg	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
149210d565efSmrg	 output_asm_insn (\"mov\\t%0, %1\", operands);
149310d565efSmrg       }
149410d565efSmrg     else
149510d565efSmrg       {
149610d565efSmrg	 /* Similarly, but the target is memory.  */
149710d565efSmrg	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
149810d565efSmrg	 output_asm_insn (\"str\\t%1, %0\", operands);
149910d565efSmrg       }
150010d565efSmrg
150110d565efSmrg     switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
150210d565efSmrg       {
150310d565efSmrg	 case 4:
150410d565efSmrg	   output_asm_insn (\"b%d0\\t%l1\", cond);
150510d565efSmrg	   return \"\";
150610d565efSmrg	 case 6:
150710d565efSmrg	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
150810d565efSmrg	   return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
150910d565efSmrg	 default:
151010d565efSmrg	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
151110d565efSmrg	   return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
151210d565efSmrg       }
151310d565efSmrg   }
151410d565efSmrg  "
151510d565efSmrg  [(set (attr "far_jump")
151610d565efSmrg        (if_then_else
151710d565efSmrg	    (ior (and (eq (symbol_ref ("which_alternative"))
151810d565efSmrg	                  (const_int 0))
151910d565efSmrg		      (eq_attr "length" "8"))
152010d565efSmrg		 (eq_attr "length" "10"))
152110d565efSmrg	    (const_string "yes")
152210d565efSmrg            (const_string "no")))
152310d565efSmrg   (set_attr_alternative "length"
152410d565efSmrg      [
152510d565efSmrg       ;; Alternative 0
152610d565efSmrg       (if_then_else
152710d565efSmrg	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
152810d565efSmrg	      (le (minus (match_dup 4) (pc)) (const_int 256)))
152910d565efSmrg	 (const_int 4)
153010d565efSmrg	 (if_then_else
153110d565efSmrg	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
153210d565efSmrg		(le (minus (match_dup 4) (pc)) (const_int 2048)))
153310d565efSmrg	   (const_int 6)
153410d565efSmrg	   (const_int 8)))
153510d565efSmrg       ;; Alternative 1
153610d565efSmrg       (if_then_else
153710d565efSmrg	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
153810d565efSmrg	      (le (minus (match_dup 4) (pc)) (const_int 256)))
153910d565efSmrg	 (const_int 6)
154010d565efSmrg	 (if_then_else
154110d565efSmrg	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
154210d565efSmrg		(le (minus (match_dup 4) (pc)) (const_int 2048)))
154310d565efSmrg	   (const_int 8)
154410d565efSmrg	   (const_int 10)))
154510d565efSmrg       ;; Alternative 2
154610d565efSmrg       (if_then_else
154710d565efSmrg	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
154810d565efSmrg	      (le (minus (match_dup 4) (pc)) (const_int 256)))
154910d565efSmrg	 (const_int 6)
155010d565efSmrg	 (if_then_else
155110d565efSmrg	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
155210d565efSmrg		(le (minus (match_dup 4) (pc)) (const_int 2048)))
155310d565efSmrg	   (const_int 8)
155410d565efSmrg	   (const_int 10)))
155510d565efSmrg       ;; Alternative 3
155610d565efSmrg       (if_then_else
155710d565efSmrg	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
155810d565efSmrg	      (le (minus (match_dup 4) (pc)) (const_int 256)))
155910d565efSmrg	 (const_int 6)
156010d565efSmrg	 (if_then_else
156110d565efSmrg	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
156210d565efSmrg		(le (minus (match_dup 4) (pc)) (const_int 2048)))
156310d565efSmrg	   (const_int 8)
156410d565efSmrg	   (const_int 10)))])
156510d565efSmrg   (set_attr "type" "multiple")]
156610d565efSmrg)
156710d565efSmrg
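;; Add-and-branch: the adds (or subs, for a negative constant) that produces
;; the sum also sets the flags, so no separate comparison is needed.  Only
;; conditions that can be read from the N and Z flags of the result compared
;; against zero (EQ, NE, GE, LT) are accepted.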
156810d565efSmrg(define_insn "*addsi3_cbranch"
156910d565efSmrg  [(set (pc)
157010d565efSmrg	(if_then_else
157110d565efSmrg	 (match_operator 4 "arm_comparison_operator"
157210d565efSmrg	  [(plus:SI
157310d565efSmrg	    (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
157410d565efSmrg	    (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
157510d565efSmrg	   (const_int 0)])
157610d565efSmrg	 (label_ref (match_operand 5 "" ""))
157710d565efSmrg	 (pc)))
157810d565efSmrg   (set
157910d565efSmrg    (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
158010d565efSmrg    (plus:SI (match_dup 2) (match_dup 3)))
158110d565efSmrg   (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
158210d565efSmrg  "TARGET_THUMB1
158310d565efSmrg   && (GET_CODE (operands[4]) == EQ
158410d565efSmrg       || GET_CODE (operands[4]) == NE
158510d565efSmrg       || GET_CODE (operands[4]) == GE
158610d565efSmrg       || GET_CODE (operands[4]) == LT)"
158710d565efSmrg  "*
158810d565efSmrg   {
158910d565efSmrg     rtx cond[3];
159010d565efSmrg
159110d565efSmrg     cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
159210d565efSmrg     cond[1] = operands[2];
159310d565efSmrg     cond[2] = operands[3];
159410d565efSmrg
159510d565efSmrg     if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
159610d565efSmrg       output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
159710d565efSmrg     else
159810d565efSmrg       output_asm_insn (\"adds\\t%0, %1, %2\", cond);
159910d565efSmrg
160010d565efSmrg     if (which_alternative >= 2
160110d565efSmrg	 && which_alternative < 4)
160210d565efSmrg       output_asm_insn (\"mov\\t%0, %1\", operands);
160310d565efSmrg     else if (which_alternative >= 4)
160410d565efSmrg       output_asm_insn (\"str\\t%1, %0\", operands);
160510d565efSmrg
160610d565efSmrg     switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
160710d565efSmrg       {
160810d565efSmrg	 case 4:
160910d565efSmrg	   return \"b%d4\\t%l5\";
161010d565efSmrg	 case 6:
161110d565efSmrg	   return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
161210d565efSmrg	 default:
161310d565efSmrg	   return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
161410d565efSmrg       }
161510d565efSmrg   }
161610d565efSmrg  "
161710d565efSmrg  [(set (attr "far_jump")
161810d565efSmrg        (if_then_else
161910d565efSmrg	    (ior (and (lt (symbol_ref ("which_alternative"))
162010d565efSmrg	                  (const_int 2))
162110d565efSmrg		      (eq_attr "length" "8"))
162210d565efSmrg		 (eq_attr "length" "10"))
162310d565efSmrg	    (const_string "yes")
162410d565efSmrg            (const_string "no")))
162510d565efSmrg   (set (attr "length")
162610d565efSmrg     (if_then_else
162710d565efSmrg       (lt (symbol_ref ("which_alternative"))
162810d565efSmrg		       (const_int 2))
162910d565efSmrg       (if_then_else
163010d565efSmrg	 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
163110d565efSmrg	      (le (minus (match_dup 5) (pc)) (const_int 256)))
163210d565efSmrg	 (const_int 4)
163310d565efSmrg	 (if_then_else
163410d565efSmrg	   (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
163510d565efSmrg		(le (minus (match_dup 5) (pc)) (const_int 2048)))
163610d565efSmrg	   (const_int 6)
163710d565efSmrg	   (const_int 8)))
163810d565efSmrg       (if_then_else
163910d565efSmrg	 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
164010d565efSmrg	      (le (minus (match_dup 5) (pc)) (const_int 256)))
164110d565efSmrg	 (const_int 6)
164210d565efSmrg	 (if_then_else
164310d565efSmrg	   (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
164410d565efSmrg		(le (minus (match_dup 5) (pc)) (const_int 2048)))
164510d565efSmrg	   (const_int 8)
164610d565efSmrg	   (const_int 10)))))
164710d565efSmrg   (set_attr "type" "multiple")]
164810d565efSmrg)
164910d565efSmrg
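;; As above, but the sum itself is not needed; depending on the form of %2
;; the flags are obtained with cmp against the negated immediate, with cmn
;; against the register, or by actually performing the addition into the
;; scratch register.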
165010d565efSmrg(define_insn "*addsi3_cbranch_scratch"
165110d565efSmrg  [(set (pc)
165210d565efSmrg	(if_then_else
165310d565efSmrg	 (match_operator 3 "arm_comparison_operator"
165410d565efSmrg	  [(plus:SI
165510d565efSmrg	    (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
165610d565efSmrg	    (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
165710d565efSmrg	   (const_int 0)])
165810d565efSmrg	 (label_ref (match_operand 4 "" ""))
165910d565efSmrg	 (pc)))
166010d565efSmrg   (clobber (match_scratch:SI 0 "=X,X,l,l"))]
166110d565efSmrg  "TARGET_THUMB1
166210d565efSmrg   && (GET_CODE (operands[3]) == EQ
166310d565efSmrg       || GET_CODE (operands[3]) == NE
166410d565efSmrg       || GET_CODE (operands[3]) == GE
166510d565efSmrg       || GET_CODE (operands[3]) == LT)"
166610d565efSmrg  "*
166710d565efSmrg   {
166810d565efSmrg     switch (which_alternative)
166910d565efSmrg       {
167010d565efSmrg       case 0:
167110d565efSmrg	 output_asm_insn (\"cmp\t%1, #%n2\", operands);
167210d565efSmrg	 break;
167310d565efSmrg       case 1:
167410d565efSmrg	 output_asm_insn (\"cmn\t%1, %2\", operands);
167510d565efSmrg	 break;
167610d565efSmrg       case 2:
167710d565efSmrg	 if (INTVAL (operands[2]) < 0)
167810d565efSmrg	   output_asm_insn (\"subs\t%0, %1, %2\", operands);
167910d565efSmrg	 else
168010d565efSmrg	   output_asm_insn (\"adds\t%0, %1, %2\", operands);
168110d565efSmrg	 break;
168210d565efSmrg       case 3:
168310d565efSmrg	 if (INTVAL (operands[2]) < 0)
168410d565efSmrg	   output_asm_insn (\"subs\t%0, %0, %2\", operands);
168510d565efSmrg	 else
168610d565efSmrg	   output_asm_insn (\"adds\t%0, %0, %2\", operands);
168710d565efSmrg	 break;
168810d565efSmrg       }
168910d565efSmrg
169010d565efSmrg     switch (get_attr_length (insn))
169110d565efSmrg       {
169210d565efSmrg	 case 4:
169310d565efSmrg	   return \"b%d3\\t%l4\";
169410d565efSmrg	 case 6:
169510d565efSmrg	   return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
169610d565efSmrg	 default:
169710d565efSmrg	   return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
169810d565efSmrg       }
169910d565efSmrg   }
170010d565efSmrg  "
170110d565efSmrg  [(set (attr "far_jump")
170210d565efSmrg        (if_then_else
170310d565efSmrg	    (eq_attr "length" "8")
170410d565efSmrg	    (const_string "yes")
170510d565efSmrg            (const_string "no")))
170610d565efSmrg   (set (attr "length")
170710d565efSmrg       (if_then_else
170810d565efSmrg	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
170910d565efSmrg	      (le (minus (match_dup 4) (pc)) (const_int 256)))
171010d565efSmrg	 (const_int 4)
171110d565efSmrg	 (if_then_else
171210d565efSmrg	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
171310d565efSmrg		(le (minus (match_dup 4) (pc)) (const_int 2048)))
171410d565efSmrg	   (const_int 6)
171510d565efSmrg	   (const_int 8))))
171610d565efSmrg   (set_attr "type" "multiple")]
171710d565efSmrg)
171810d565efSmrg
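;; A DImode compare against zero only needs the Z flag, so ORing the two
;; halves into a scratch register is sufficient:
;;   orrs r2, r0, r1    @ Z is set only when the whole 64-bit value is zero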
171910d565efSmrg(define_insn "*thumb_cmpdi_zero"
172010d565efSmrg  [(set (reg:CC_Z CC_REGNUM)
172110d565efSmrg	(compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
172210d565efSmrg		      (const_int 0)))
172310d565efSmrg   (clobber (match_scratch:SI 1 "=l"))]
172410d565efSmrg  "TARGET_THUMB1"
172510d565efSmrg  "orrs\\t%1, %Q0, %R0"
172610d565efSmrg  [(set_attr "conds" "set")
172710d565efSmrg   (set_attr "length" "2")
172810d565efSmrg   (set_attr "type" "logics_reg")]
172910d565efSmrg)
173010d565efSmrg
173110d565efSmrg(define_expand "cstoresi_eq0_thumb1"
173210d565efSmrg  [(parallel
1733*ec02198aSmrg    [(set (match_operand:SI 0 "s_register_operand")
1734*ec02198aSmrg	  (eq:SI (match_operand:SI 1 "s_register_operand")
173510d565efSmrg		 (const_int 0)))
173610d565efSmrg     (clobber (match_dup:SI 2))])]
173710d565efSmrg  "TARGET_THUMB1"
173810d565efSmrg  "operands[2] = gen_reg_rtx (SImode);"
173910d565efSmrg)
174010d565efSmrg
174110d565efSmrg(define_expand "cstoresi_ne0_thumb1"
174210d565efSmrg  [(parallel
1743*ec02198aSmrg    [(set (match_operand:SI 0 "s_register_operand")
1744*ec02198aSmrg	  (ne:SI (match_operand:SI 1 "s_register_operand")
174510d565efSmrg		 (const_int 0)))
174610d565efSmrg     (clobber (match_dup:SI 2))])]
174710d565efSmrg  "TARGET_THUMB1"
174810d565efSmrg  "operands[2] = gen_reg_rtx (SImode);"
174910d565efSmrg)
175010d565efSmrg
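;; (x == 0) is computed from the carry of a reverse subtract.  For the first
;; alternative the sequence works out as:
;;   rsbs r0, r1, #0    @ r0 = -r1, carry is set only when r1 == 0
;;   adcs r0, r0, r1    @ r0 = -r1 + r1 + C = C, i.e. 1 if r1 == 0, else 0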
175110d565efSmrg(define_insn "*cstoresi_eq0_thumb1_insn"
175210d565efSmrg  [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
175310d565efSmrg	(eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
175410d565efSmrg	       (const_int 0)))
175510d565efSmrg   (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
175610d565efSmrg  "TARGET_THUMB1"
175710d565efSmrg  "@
175810d565efSmrg   rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
175910d565efSmrg   rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
176010d565efSmrg  [(set_attr "length" "4")
176110d565efSmrg   (set_attr "type" "multiple")]
176210d565efSmrg)
176310d565efSmrg
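;; (x != 0) is computed from the carry of a subtract by one:
;;   subs r2, r1, #1    @ carry is set only when r1 >= 1, i.e. r1 != 0
;;   sbcs r0, r1, r2    @ r0 = r1 - (r1 - 1) - (1 - C) = C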
176410d565efSmrg(define_insn "*cstoresi_ne0_thumb1_insn"
176510d565efSmrg  [(set (match_operand:SI 0 "s_register_operand" "=l")
176610d565efSmrg	(ne:SI (match_operand:SI 1 "s_register_operand" "0")
176710d565efSmrg	       (const_int 0)))
176810d565efSmrg   (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
176910d565efSmrg  "TARGET_THUMB1"
177010d565efSmrg  "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
177110d565efSmrg  [(set_attr "length" "4")]
177210d565efSmrg)
177310d565efSmrg
177410d565efSmrg;; Used as part of the expansion of thumb ltu and gtu sequences.
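;; The borrow from the comparison is broadcast into the whole register:
;;   cmp  r1, r2        @ carry is clear (borrow) exactly when r1 < r2
;;   sbcs r0, r0, r0    @ r0 = r0 - r0 - (1 - C) = C - 1, i.e. -1 or 0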
177510d565efSmrg(define_insn "cstoresi_nltu_thumb1"
177610d565efSmrg  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
177710d565efSmrg        (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
177810d565efSmrg			(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
177910d565efSmrg  "TARGET_THUMB1"
178010d565efSmrg  "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
178110d565efSmrg  [(set_attr "length" "4")
178210d565efSmrg   (set_attr "type" "multiple")]
178310d565efSmrg)
178410d565efSmrg
178510d565efSmrg(define_insn_and_split "cstoresi_ltu_thumb1"
178610d565efSmrg  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
178710d565efSmrg        (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
178810d565efSmrg		(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
178910d565efSmrg  "TARGET_THUMB1"
179010d565efSmrg  "#"
179110d565efSmrg  "TARGET_THUMB1"
179210d565efSmrg  [(set (match_dup 3)
179310d565efSmrg	(neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
179410d565efSmrg   (set (match_dup 0) (neg:SI (match_dup 3)))]
179510d565efSmrg  "operands[3] = gen_reg_rtx (SImode);"
179610d565efSmrg  [(set_attr "length" "4")
179710d565efSmrg   (set_attr "type" "multiple")]
179810d565efSmrg)
179910d565efSmrg
180010d565efSmrg;; Used as part of the expansion of thumb les sequence.
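;; For example:
;;   cmp  r3, r4        @ carry = (r3 >= r4), unsigned
;;   adcs r0, r0, r2    @ r0 = r0 + r2 + (r3 >= r4)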
180110d565efSmrg(define_insn "thumb1_addsi3_addgeu"
180210d565efSmrg  [(set (match_operand:SI 0 "s_register_operand" "=l")
180310d565efSmrg        (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
180410d565efSmrg			  (match_operand:SI 2 "s_register_operand" "l"))
180510d565efSmrg		 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
180610d565efSmrg			 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
180710d565efSmrg  "TARGET_THUMB1"
180810d565efSmrg  "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
180910d565efSmrg  [(set_attr "length" "4")
181010d565efSmrg   (set_attr "type" "multiple")]
181110d565efSmrg)
181210d565efSmrg
181310d565efSmrg
181410d565efSmrg(define_insn "*thumb_jump"
181510d565efSmrg  [(set (pc)
181610d565efSmrg	(label_ref (match_operand 0 "" "")))]
181710d565efSmrg  "TARGET_THUMB1"
181810d565efSmrg  "*
181910d565efSmrg  if (get_attr_length (insn) == 2)
182010d565efSmrg    return \"b\\t%l0\";
182110d565efSmrg  return \"bl\\t%l0\\t%@ far jump\";
182210d565efSmrg  "
182310d565efSmrg  [(set (attr "far_jump")
182410d565efSmrg        (if_then_else
182510d565efSmrg	    (eq_attr "length" "4")
182610d565efSmrg	    (const_string "yes")
182710d565efSmrg	    (const_string "no")))
182810d565efSmrg   (set (attr "length")
182910d565efSmrg        (if_then_else
183010d565efSmrg	    (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
183110d565efSmrg		 (le (minus (match_dup 0) (pc)) (const_int 2048)))
183210d565efSmrg  	    (const_int 2)
183310d565efSmrg	    (const_int 4)))
183410d565efSmrg   (set_attr "type" "branch")]
183510d565efSmrg)
183610d565efSmrg
183710d565efSmrg(define_insn "*call_reg_thumb1_v5"
183810d565efSmrg  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
183910d565efSmrg	 (match_operand 1 "" ""))
184010d565efSmrg   (use (match_operand 2 "" ""))
184110d565efSmrg   (clobber (reg:SI LR_REGNUM))]
18420fc04c29Smrg  "TARGET_THUMB1 && arm_arch5t && !SIBLING_CALL_P (insn)"
184310d565efSmrg  "blx\\t%0"
184410d565efSmrg  [(set_attr "length" "2")
184510d565efSmrg   (set_attr "type" "call")]
184610d565efSmrg)
184710d565efSmrg
184810d565efSmrg(define_insn "*nonsecure_call_reg_thumb1_v5"
1849c7a68eb7Smrg  [(call (unspec:SI [(mem:SI (reg:SI R4_REGNUM))]
185010d565efSmrg		    UNSPEC_NONSECURE_MEM)
1851c7a68eb7Smrg	 (match_operand 0 "" ""))
1852c7a68eb7Smrg   (use (match_operand 1 "" ""))
1853c7a68eb7Smrg   (clobber (reg:SI LR_REGNUM))]
185410d565efSmrg  "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)"
185510d565efSmrg  "bl\\t__gnu_cmse_nonsecure_call"
185610d565efSmrg  [(set_attr "length" "4")
185710d565efSmrg   (set_attr "type" "call")]
185810d565efSmrg)
185910d565efSmrg
186010d565efSmrg(define_insn "*call_reg_thumb1"
186110d565efSmrg  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
186210d565efSmrg	 (match_operand 1 "" ""))
186310d565efSmrg   (use (match_operand 2 "" ""))
186410d565efSmrg   (clobber (reg:SI LR_REGNUM))]
18650fc04c29Smrg  "TARGET_THUMB1 && !arm_arch5t && !SIBLING_CALL_P (insn)"
186610d565efSmrg  "*
186710d565efSmrg  {
186810d565efSmrg    if (!TARGET_CALLER_INTERWORKING)
186910d565efSmrg      return thumb_call_via_reg (operands[0]);
187010d565efSmrg    else if (operands[1] == const0_rtx)
187110d565efSmrg      return \"bl\\t%__interwork_call_via_%0\";
187210d565efSmrg    else if (frame_pointer_needed)
187310d565efSmrg      return \"bl\\t%__interwork_r7_call_via_%0\";
187410d565efSmrg    else
187510d565efSmrg      return \"bl\\t%__interwork_r11_call_via_%0\";
187610d565efSmrg  }"
187710d565efSmrg  [(set_attr "type" "call")]
187810d565efSmrg)
187910d565efSmrg
188010d565efSmrg(define_insn "*call_value_reg_thumb1_v5"
188110d565efSmrg  [(set (match_operand 0 "" "")
188210d565efSmrg	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
188310d565efSmrg	      (match_operand 2 "" "")))
188410d565efSmrg   (use (match_operand 3 "" ""))
188510d565efSmrg   (clobber (reg:SI LR_REGNUM))]
18860fc04c29Smrg  "TARGET_THUMB1 && arm_arch5t"
188710d565efSmrg  "blx\\t%1"
188810d565efSmrg  [(set_attr "length" "2")
188910d565efSmrg   (set_attr "type" "call")]
189010d565efSmrg)
189110d565efSmrg
189210d565efSmrg(define_insn "*nonsecure_call_value_reg_thumb1_v5"
189310d565efSmrg  [(set (match_operand 0 "" "")
189410d565efSmrg	(call (unspec:SI
1895c7a68eb7Smrg	       [(mem:SI (reg:SI R4_REGNUM))]
189610d565efSmrg	       UNSPEC_NONSECURE_MEM)
1897c7a68eb7Smrg	      (match_operand 1 "" "")))
1898c7a68eb7Smrg   (use (match_operand 2 "" ""))
1899c7a68eb7Smrg   (clobber (reg:SI LR_REGNUM))]
190010d565efSmrg  "TARGET_THUMB1 && use_cmse"
190110d565efSmrg  "bl\\t__gnu_cmse_nonsecure_call"
190210d565efSmrg  [(set_attr "length" "4")
190310d565efSmrg   (set_attr "type" "call")]
190410d565efSmrg)
190510d565efSmrg
190610d565efSmrg(define_insn "*call_value_reg_thumb1"
190710d565efSmrg  [(set (match_operand 0 "" "")
190810d565efSmrg	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
190910d565efSmrg	      (match_operand 2 "" "")))
191010d565efSmrg   (use (match_operand 3 "" ""))
191110d565efSmrg   (clobber (reg:SI LR_REGNUM))]
19120fc04c29Smrg  "TARGET_THUMB1 && !arm_arch5t"
191310d565efSmrg  "*
191410d565efSmrg  {
191510d565efSmrg    if (!TARGET_CALLER_INTERWORKING)
191610d565efSmrg      return thumb_call_via_reg (operands[1]);
191710d565efSmrg    else if (operands[2] == const0_rtx)
191810d565efSmrg      return \"bl\\t%__interwork_call_via_%1\";
191910d565efSmrg    else if (frame_pointer_needed)
192010d565efSmrg      return \"bl\\t%__interwork_r7_call_via_%1\";
192110d565efSmrg    else
192210d565efSmrg      return \"bl\\t%__interwork_r11_call_via_%1\";
192310d565efSmrg  }"
192410d565efSmrg  [(set_attr "type" "call")]
192510d565efSmrg)
192610d565efSmrg
192710d565efSmrg(define_insn "*call_insn"
192810d565efSmrg  [(call (mem:SI (match_operand:SI 0 "" ""))
192910d565efSmrg	 (match_operand:SI 1 "" ""))
193010d565efSmrg   (use (match_operand 2 "" ""))
193110d565efSmrg   (clobber (reg:SI LR_REGNUM))]
193210d565efSmrg  "TARGET_THUMB1
193310d565efSmrg   && GET_CODE (operands[0]) == SYMBOL_REF
193410d565efSmrg   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
193510d565efSmrg  "bl\\t%a0"
193610d565efSmrg  [(set_attr "length" "4")
193710d565efSmrg   (set_attr "type" "call")]
193810d565efSmrg)
193910d565efSmrg
194010d565efSmrg(define_insn "*call_value_insn"
194110d565efSmrg  [(set (match_operand 0 "" "")
194210d565efSmrg	(call (mem:SI (match_operand 1 "" ""))
194310d565efSmrg	      (match_operand 2 "" "")))
194410d565efSmrg   (use (match_operand 3 "" ""))
194510d565efSmrg   (clobber (reg:SI LR_REGNUM))]
194610d565efSmrg  "TARGET_THUMB1
194710d565efSmrg   && GET_CODE (operands[1]) == SYMBOL_REF
194810d565efSmrg   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
194910d565efSmrg  "bl\\t%a1"
195010d565efSmrg  [(set_attr "length" "4")
195110d565efSmrg   (set_attr "type" "call")]
195210d565efSmrg)
195310d565efSmrg
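;; The expander below emits an unsigned bounds check that branches to the
;; default label (operand 3) when the index exceeds the bound, moves the
;; index into r0 and then dispatches through thumb1_casesi_dispatch, which
;; clobbers ip and lr.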
195410d565efSmrg(define_expand "thumb1_casesi_internal_pic"
1955*ec02198aSmrg  [(match_operand:SI 0 "s_register_operand")
1956*ec02198aSmrg   (match_operand:SI 1 "thumb1_cmp_operand")
195710d565efSmrg   (match_operand 2 "" "")
195810d565efSmrg   (match_operand 3 "" "")]
195910d565efSmrg  "TARGET_THUMB1"
196010d565efSmrg  {
196110d565efSmrg    rtx reg0;
196210d565efSmrg    rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
196310d565efSmrg    emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
196410d565efSmrg				    operands[3]));
196510d565efSmrg    reg0 = gen_rtx_REG (SImode, 0);
196610d565efSmrg    emit_move_insn (reg0, operands[0]);
196710d565efSmrg    emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
196810d565efSmrg    DONE;
196910d565efSmrg  }
197010d565efSmrg)
197110d565efSmrg
197210d565efSmrg(define_insn "thumb1_casesi_dispatch"
197310d565efSmrg  [(parallel [(set (pc) (unspec [(reg:SI 0)
197410d565efSmrg				 (label_ref (match_operand 0 "" ""))
197510d565efSmrg;;				 (label_ref (match_operand 1 "" ""))
197610d565efSmrg]
197710d565efSmrg			 UNSPEC_THUMB1_CASESI))
197810d565efSmrg	      (clobber (reg:SI IP_REGNUM))
197910d565efSmrg              (clobber (reg:SI LR_REGNUM))])]
198010d565efSmrg  "TARGET_THUMB1"
198110d565efSmrg  "* return thumb1_output_casesi(operands);"
198210d565efSmrg  [(set_attr "length" "4")
198310d565efSmrg   (set_attr "type" "multiple")]
198410d565efSmrg)
198510d565efSmrg
198610d565efSmrg;; NB Never uses BX.
198710d565efSmrg(define_insn "*thumb1_indirect_jump"
198810d565efSmrg  [(set (pc)
198910d565efSmrg	(match_operand:SI 0 "register_operand" "l*r"))]
199010d565efSmrg  "TARGET_THUMB1"
199110d565efSmrg  "mov\\tpc, %0"
199210d565efSmrg  [(set_attr "conds" "clob")
199310d565efSmrg   (set_attr "length" "2")
199410d565efSmrg   (set_attr "type" "branch")]
199510d565efSmrg)
199610d565efSmrg
199710d565efSmrg
199810d565efSmrg(define_insn "prologue_thumb1_interwork"
199910d565efSmrg  [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
200010d565efSmrg  "TARGET_THUMB1"
200110d565efSmrg  "* return thumb1_output_interwork ();"
200210d565efSmrg  [(set_attr "length" "8")
200310d565efSmrg   (set_attr "type" "multiple")]
200410d565efSmrg)
200510d565efSmrg
200610d565efSmrg(define_insn "*epilogue_insns"
200710d565efSmrg  [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
200810d565efSmrg  "TARGET_THUMB1"
200910d565efSmrg  "*
201010d565efSmrg    return thumb1_unexpanded_epilogue ();
201110d565efSmrg  "
201210d565efSmrg  ; Length is the absolute worst case: when using CMSE and this is an entry
201310d565efSmrg  ; function, an extra 4 bytes (for an MSR) will be added.
201410d565efSmrg  [(set (attr "length")
201510d565efSmrg	(if_then_else
201610d565efSmrg	 (match_test "IS_CMSE_ENTRY (arm_current_func_type ())")
201710d565efSmrg	 (const_int 48)
201810d565efSmrg	 (const_int 44)))
201910d565efSmrg   (set_attr "type" "block")
202010d565efSmrg   ;; We don't clobber the conditions, but the potential length of this
202110d565efSmrg   ;; operation is sufficient to make conditionalizing the sequence
202210d565efSmrg   ;; unlikely to be profitable.
202310d565efSmrg   (set_attr "conds" "clob")]
202410d565efSmrg)
202510d565efSmrg
202610d565efSmrg;; Miscellaneous Thumb patterns
202710d565efSmrg(define_expand "tablejump"
2028*ec02198aSmrg  [(parallel [(set (pc) (match_operand:SI 0 "register_operand"))
202910d565efSmrg	      (use (label_ref (match_operand 1 "" "")))])]
203010d565efSmrg  "TARGET_THUMB1"
203110d565efSmrg  "
203210d565efSmrg  if (flag_pic)
203310d565efSmrg    {
203410d565efSmrg      /* Hopefully, CSE will eliminate this copy.  */
203510d565efSmrg      rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
203610d565efSmrg      rtx reg2 = gen_reg_rtx (SImode);
203710d565efSmrg
203810d565efSmrg      emit_insn (gen_addsi3 (reg2, operands[0], reg1));
203910d565efSmrg      operands[0] = reg2;
204010d565efSmrg    }
204110d565efSmrg  "
204210d565efSmrg)
204310d565efSmrg
204410d565efSmrg(define_insn "*thumb1_movpc_insn"
204510d565efSmrg  [(set (match_operand:SI 0 "s_register_operand" "=l")
204610d565efSmrg	(reg:SI PC_REGNUM))]
204710d565efSmrg  "TARGET_THUMB1"
204810d565efSmrg  "mov\\t%0, pc"
204910d565efSmrg  [(set_attr "length" "2")
205010d565efSmrg   (set_attr "conds"  "nocond")
205110d565efSmrg   (set_attr "type"   "mov_reg")]
205210d565efSmrg)
205310d565efSmrg
205410d565efSmrg;; NB never uses BX.
205510d565efSmrg(define_insn "*thumb1_tablejump"
205610d565efSmrg  [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
205710d565efSmrg   (use (label_ref (match_operand 1 "" "")))]
205810d565efSmrg  "TARGET_THUMB1"
205910d565efSmrg  "mov\\t%|pc, %0"
206010d565efSmrg  [(set_attr "length" "2")
2061*ec02198aSmrg   (set_attr "type" "branch")]
206210d565efSmrg)
206310d565efSmrg
206410d565efSmrg(define_insn_and_split "thumb_eh_return"
206510d565efSmrg  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
206610d565efSmrg		    VUNSPEC_EH_RETURN)
206710d565efSmrg   (clobber (match_scratch:SI 1 "=&l"))]
206810d565efSmrg  "TARGET_THUMB1"
206910d565efSmrg  "#"
207010d565efSmrg  "&& reload_completed"
207110d565efSmrg  [(const_int 0)]
207210d565efSmrg  "
207310d565efSmrg  {
207410d565efSmrg    thumb_set_return_address (operands[0], operands[1]);
207510d565efSmrg    DONE;
207610d565efSmrg  }"
207710d565efSmrg  [(set_attr "type" "mov_reg")]
207810d565efSmrg)
20790fc04c29Smrg
2080*ec02198aSmrg;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
2081*ec02198aSmrg;; canary value does not live beyond the end of this sequence.
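;; The sequence loads one value through the address held in %2, reloads %2
;; from %1, and XORs the two so that %0 is zero only when they match; the
;; final movs clears %2 so that neither loaded value survives the test.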
20820fc04c29Smrg(define_insn "thumb1_stack_protect_test_insn"
20830fc04c29Smrg  [(set (match_operand:SI 0 "register_operand" "=&l")
20840fc04c29Smrg	(unspec:SI [(match_operand:SI 1 "memory_operand" "m")
20850fc04c29Smrg		    (mem:SI (match_operand:SI 2 "register_operand" "+l"))]
20860fc04c29Smrg	 UNSPEC_SP_TEST))
20870fc04c29Smrg   (clobber (match_dup 2))]
20880fc04c29Smrg  "TARGET_THUMB1"
2089*ec02198aSmrg  "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;movs\t%2, #0"
2090*ec02198aSmrg  [(set_attr "length" "10")
2091*ec02198aSmrg   (set_attr "conds" "clob")
20920fc04c29Smrg   (set_attr "type" "multiple")]
20930fc04c29Smrg)
209410d565efSmrg