1;; ARM Thumb-1 Machine Description
2;; Copyright (C) 2007-2021 Free Software Foundation, Inc.
3;;
4;; This file is part of GCC.
5;;
6;; GCC is free software; you can redistribute it and/or modify it
7;; under the terms of the GNU General Public License as published by
8;; the Free Software Foundation; either version 3, or (at your option)
9;; any later version.
10;;
11;; GCC is distributed in the hope that it will be useful, but
12;; WITHOUT ANY WARRANTY; without even the implied warranty of
13;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14;; General Public License for more details.
15;;
16;; You should have received a copy of the GNU General Public License
17;; along with GCC; see the file COPYING3.  If not see
18;; <http://www.gnu.org/licenses/>.  */
19
20
21;;---------------------------------------------------------------------------
22;; Insn patterns
23;;
24
25;; Beware of splitting Thumb1 patterns that output multiple
26;; assembly instructions, in particular instruction such as SBC and
27;; ADC which consume flags.  For example, in the pattern thumb_subdi3
28;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29;; and then the Carry flag is used by SBC to compute the correct
30;; result.  If we split thumb_subdi3 pattern into two separate RTL
31;; insns (using define_insn_and_split), the scheduler might place
32;; other RTL insns between SUB and SBC, possibly modifying the Carry
33;; flag used by SBC.  This might happen because most Thumb1 patterns
34;; for flag-setting instructions do not have explicit RTL for setting
35;; or clobbering the flags.  Instead, they have the attribute "conds"
36;; with value "set" or "clob".  However, this attribute is not used to
37;; identify dependencies and therefore the scheduler might reorder
;; these instructions.  Currently, this problem cannot happen because
;; there are no separate Thumb1 patterns for individual instructions
40;; that consume flags (except conditional execution, which is treated
41;; differently).  In particular there is no Thumb1 armv6-m pattern for
42;; sbc or adc.
43
44
45
;; 64-bit add: ADDS on the low words sets the carry flag, which the
;; following ADCS consumes for the high words.  Kept as a single insn
;; (not define_insn_and_split) per the note at the top of this file.
;; %Q = low word, %R = high word of a DI operand.
(define_insn "*thumb1_adddi3"
  [(set (match_operand:DI          0 "register_operand" "=l")
	(plus:DI (match_operand:DI 1 "register_operand" "%0")
		 (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))
  ]
  "TARGET_THUMB1"
  "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
57
58;; Changes to the constraints of this pattern must be propagated to those of
59;; atomic additions in sync.md and to the logic for bind_old_new in
60;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
61;; constraints here and aim to be as permissive.
;; 32-bit add with many alternatives: low-reg/low-reg forms, immediate
;; forms, high-reg and SP-relative forms, plus three "#" alternatives
;; (Pa/Pb/Pc constraints) that are split after reload into two adds
;; when the immediate is out of single-instruction range.
;; Negative immediates in alternatives 2 and 6 are emitted as SUB(S)
;; of the negated constant (%n2 prints the negation).
(define_insn_and_split "*thumb1_addsi3"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
  "TARGET_THUMB1"
  "*
   static const char * const asms[] =
   {
     \"adds\\t%0, %0, %2\",
     \"subs\\t%0, %0, #%n2\",
     \"adds\\t%0, %1, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %1, %2\",
     \"add\\t%0, %1, %2\",
     \"#\",
     \"#\",
     \"#\"
   };
   if ((which_alternative == 2 || which_alternative == 6)
       && CONST_INT_P (operands[2])
       && INTVAL (operands[2]) < 0)
     return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
   return asms[which_alternative];
  "
  ;; Split condition: after reload, constants outside the reachable
  ;; range for a single add (255 for a register base, 1020 for SP).
  "&& reload_completed && CONST_INT_P (operands[2])
   && ((operands[1] != stack_pointer_rtx
        && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
       || (operands[1] == stack_pointer_rtx
 	   && INTVAL (operands[2]) > 1020))"
  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
  {
    ;; Split the constant into two addends: a first step clamped to the
    ;; per-base maximum (1020 for SP, +/-255 otherwise), then the rest.
    HOST_WIDE_INT offset = INTVAL (operands[2]);
    if (operands[1] == stack_pointer_rtx)
      offset -= 1020;
    else
      {
        if (offset > 255)
	  offset = 255;
	else if (offset < -255)
	  offset = -255;
      }
    operands[3] = GEN_INT (offset);
    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
  }
  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
		     alus_sreg,alus_sreg,multiple,multiple,multiple")]
)
112
113;; Reloading and elimination of the frame pointer can
114;; sometimes cause this optimization to be missed.
;; Fold "mov rN, #imm; add rN, rN, sp" into the single
;; "add rN, sp, #imm" form, valid when imm < 1024 and word-aligned.
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))
   (set (match_dup 0)
	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
  "TARGET_THUMB1
   && UINTVAL (operands[1]) < 1024
   && (UINTVAL (operands[1]) & 3) == 0"
  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
  ""
)
126
;; 64-bit subtract: SUBS sets the borrow (carry) flag consumed by SBCS.
;; Must remain one insn -- see the note at the top of this file.
(define_insn "*thumb_subdi3"
  [(set (match_operand:DI           0 "register_operand" "=l")
	(minus:DI (match_operand:DI 1 "register_operand"  "0")
		  (match_operand:DI 2 "register_operand"  "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
137
138;; Changes to the constraints of this pattern must be propagated to those of
139;; atomic subtractions in sync.md and to the logic for bind_old_new in
140;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
141;; constraints here and aim to be as permissive.
;; 32-bit subtract of a register or small immediate (constraint Pd);
;; SUBS always sets the condition flags ("conds" "set").
(define_insn "thumb1_subsi3_insn"
  [(set (match_operand:SI           0 "register_operand" "=l")
	(minus:SI (match_operand:SI 1 "register_operand" "l")
		  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
  "TARGET_THUMB1"
  "subs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)
152
153;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
154;; 1 and 2 are the same, because reload will make operand 0 match
155;; operand 1 without realizing that this conflicts with operand 2.  We fix
156;; this by adding another alternative to match this case, and then `reload'
157;; it ourselves.  This alternative must come first.
;; Pre-v6 multiply: MULS destination must equal one source, so the
;; first two alternatives copy operand 1 into the destination first.
;; Early-clobber (&) keeps the destination from overlapping operand 2.
(define_insn "*thumb_mulsi3"
  [(set (match_operand:SI          0 "register_operand" "=&l,&l,&l")
	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
		 (match_operand:SI 2 "register_operand" "l,l,l")))]
 "TARGET_THUMB1 && !arm_arch6"
  "@
   movs\\t%0, %1\;muls\\t%0, %2
   mov\\t%0, %1\;muls\\t%0, %2
   muls\\t%0, %2"
  [(set_attr "length" "4,4,2")
   (set_attr "type" "muls")]
)
170
;; v6+ multiply: destination may tie to either source, so no copy or
;; early-clobber is needed; one 16-bit MULS in every alternative.
(define_insn "*thumb_mulsi3_v6"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l")
	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
		 (match_operand:SI 2 "register_operand" "l,0,0")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   muls\\t%0, %2
   muls\\t%0, %1
   muls\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "muls")]
)
183
184;; Changes to the constraints of this pattern must be propagated to those of
185;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
186;; in arm_split_atomic_op in arm.c.  These must be at least as strict as the
187;; constraints here and aim to be as permissive.
;; Bitwise AND, two-operand ANDS form (destination ties to operand 1).
;; NOTE(review): "type" is logic_imm although operand 2 is a register;
;; logics_reg would seem more accurate -- confirm against scheduler use.
(define_insn "*thumb1_andsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(and:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "ands\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "type"  "logic_imm")
   (set_attr "conds" "set")])
197
;; Expand zero_extract (width op2 at bit op3) as two shifts through the
;; scratch: shift left by (32 - width - pos) to place the field at the
;; top, then logical shift right by (32 - width) to zero-extend it.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_THUMB1"
  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);  /* field width */

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
214
;; Same as the zero_extract split above, but with an arithmetic right
;; shift so the extracted field is sign-extended; no scratch needed.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))]
  "TARGET_THUMB1"
  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
  "{
     HOST_WIDE_INT temp = INTVAL (operands[2]);  /* field width */

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
230
;; Bit clear: op0 = op2 & ~op1.  BICS is two-operand, so the
;; destination ties to operand 2 (the non-inverted source).
(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI                 0 "register_operand" "=l")
	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
		(match_operand:SI         2 "register_operand" "0")))]
  "TARGET_THUMB1"
  "bics\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
241
242;; Changes to the constraints of this pattern must be propagated to those of
243;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
244;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
245;; constraints here and aim to be as permissive.
;; Inclusive OR, two-operand ORRS form (destination ties to operand 1).
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(ior:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "orrs\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")])
255
256;; Changes to the constraints of this pattern must be propagated to those of
257;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
258;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
259;; constraints here and aim to be as permissive.
;; Exclusive OR, two-operand EORS form (destination ties to operand 1).
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(xor:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "eors\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
270
;; Left shift: alternative 0 is the immediate form (constraint N,
;; shift count 0-31), alternative 1 the register form (dest ties to
;; operand 1).  The single UAL template covers both.
(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI            0 "register_operand" "=l,l")
	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsls\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
280
;; Arithmetic right shift; same immediate/register alternative scheme
;; as *thumb1_ashlsi3 above.
(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "asrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
290
;; Logical right shift; same immediate/register alternative scheme
;; as *thumb1_ashlsi3 above.
(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
300
;; Rotate right; register count only (Thumb-1 has no immediate ROR),
;; destination ties to operand 1.
(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l")
	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rors\\t%0, %0, %2"
  [(set_attr "type" "shift_reg")
   (set_attr "length" "2")]
)
310
;; 64-bit negate: 0 - op1 computed as RSBS on the low word (sets
;; borrow) then SBCS of the high word from the pre-zeroed %R0.
;; Early-clobber: %R0 is written before %Q1/%R1 are fully read.
(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI 0 "register_operand" "=&l")
	(neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
320
;; 32-bit negate via reverse-subtract from zero.
(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rsbs\\t%0, %1, #0"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm")]
)
329
;; Branch-free abs(x): t = x >> 31 (all ones if negative, else zero);
;; result = (x + t) ^ t.  Split after reload into the three insns.
(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
344
;; Branch-free -abs(x): t = x >> 31; result = (t - x) ^ t.
;; Mirrors *thumb1_abssi2 with a minus instead of a plus.
(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
359
;; Bitwise NOT via MVNS.
(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(not:SI (match_operand:SI 1 "register_operand"  "l")))]
  "TARGET_THUMB1"
  "mvns\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "mvn_reg")]
)
368
;; HI -> SI zero-extend.  Register source: UXTH on arch6, otherwise
;; "#" (split elsewhere into two shifts).  Memory source: LDRH, with a
;; workaround that copies SP into the destination first when reload
;; left SP as the base of an indexed address (LDRH cannot use SP).
(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1"
{
  rtx mem;

  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
    return "#";

  mem = XEXP (operands[1], 0);

  if (GET_CODE (mem) == CONST)
    mem = XEXP (mem, 0);

  if (GET_CODE (mem) == PLUS)
    {
      rtx a = XEXP (mem, 0);

      /* This can happen due to bugs in reload.  */
      if (REG_P (a) && REGNO (a) == SP_REGNUM)
        {
          rtx ops[2];
          ops[0] = operands[0];
          ops[1] = a;

          output_asm_insn ("mov\t%0, %1", ops);

          /* Rewrite the address to use the destination as the base.  */
          XEXP (mem, 0) = operands[0];
       }
    }

  return "ldrh\t%0, %1";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
				       (const_int 2) (const_int 4))
			 (const_int 4)])
   (set_attr "type" "extend,load_byte")]
)
411
;; QI -> SI zero-extend, pre-v6 (no UXTB): register source is "#"
;; (split into shifts elsewhere); memory source uses LDRB.
(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   #
   ldrb\\t%0, %1"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift_reg,load_byte")
   (set_attr "pool_range" "*,32")]
)
423
;; QI -> SI zero-extend on v6+: single UXTB or LDRB.
(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   uxtb\\t%0, %1
   ldrb\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "extend,load_byte")]
)
434
435;; We used to have an early-clobber on the scratch register here.
436;; However, there's a bug somewhere in reload which means that this
437;; can be partially ignored during spill allocation if the memory
438;; address also needs reloading; this causes us to die later on when
439;; we try to verify the operands.  Fortunately, we don't really need
440;; the early-clobber: we can always use operand 0 if operand 2
441;; overlaps the address.
;; HI -> SI sign-extend.  Register source: SXTH on arch6, else "#".
;; Memory source: LDRSH only supports [reg, reg] addressing, so other
;; address shapes are rewritten -- constant-pool references use LDR,
;; and base+offset forms load the offset into the scratch (or into the
;; destination when the scratch overlaps the address) and use LDRSH.
(define_insn "thumb1_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
   (clobber (match_scratch:SI 2 "=X,l"))]
  "TARGET_THUMB1"
  "*
  {
    rtx ops[4];
    rtx mem;

    if (which_alternative == 0 && !arm_arch6)
      return \"#\";
    if (which_alternative == 0)
      return \"sxth\\t%0, %1\";

    mem = XEXP (operands[1], 0);

    /* This code used to try to use 'V', and fix the address only if it was
       offsettable, but this fails for e.g. REG+48 because 48 is outside the
       range of QImode offsets, and offsettable_address_p does a QImode
       address check.  */

    if (GET_CODE (mem) == CONST)
      mem = XEXP (mem, 0);

    if (GET_CODE (mem) == LABEL_REF)
      return \"ldr\\t%0, %1\";

    if (GET_CODE (mem) == PLUS)
      {
        rtx a = XEXP (mem, 0);
        rtx b = XEXP (mem, 1);

        if (GET_CODE (a) == LABEL_REF
	    && CONST_INT_P (b))
          return \"ldr\\t%0, %1\";

        if (REG_P (b))
          return \"ldrsh\\t%0, %1\";

        ops[1] = a;
        ops[2] = b;
      }
    else
      {
        ops[1] = mem;
        ops[2] = const0_rtx;
      }

    gcc_assert (REG_P (ops[1]));

    ops[0] = operands[0];
    if (reg_mentioned_p (operands[2], ops[1]))
      ops[3] = ops[0];
    else
      ops[3] = operands[2];
    output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
    return \"\";
  }"
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")
   (set_attr "pool_range" "*,1018")]
)
508
;; Split a sign-extending QI load whose address is not the [reg, reg]
;; form LDRSB needs: load the constant part of the address into the
;; destination, rebuild the address as base+destination, then LDRSB.
;; If the destination overlaps the address, go via a QI move into the
;; destination's low part followed by a register sign-extend.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
{
  rtx addr = XEXP (operands[1], 0);

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    /* No split necessary.  */
    FAIL;

  if (GET_CODE (addr) == PLUS
      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
    FAIL;

  if (reg_overlap_mentioned_p (operands[0], addr))
    {
      rtx t = gen_lowpart (QImode, operands[0]);
      emit_move_insn (t, operands[1]);
      emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
      DONE;
    }

  if (REG_P (addr))
    {
      addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
      operands[2] = const0_rtx;
    }
  else if (GET_CODE (addr) != PLUS)
    FAIL;
  else if (REG_P (XEXP (addr, 0)))
    {
      operands[2] = XEXP (addr, 1);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
    }
  else
    {
      operands[2] = XEXP (addr, 0);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
    }

  operands[3] = change_address (operands[1], QImode, addr);
})
558
;; When an add-of-constant feeds a [op0, op2] sign-extending QI load
;; (with op2 zeroed), fold the constant into op2 instead, saving the
;; add.  Requires op0/op2 dead afterwards (or equal to the result).
(define_peephole2
  [(set (match_operand:SI 0 "register_operand" "")
	(plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
   (set (match_operand:SI 3 "register_operand" "")
	(sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
  "TARGET_THUMB1
   && GET_CODE (XEXP (operands[4], 0)) == PLUS
   && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
   && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
   && (peep2_reg_dead_p (3, operands[0])
       || rtx_equal_p (operands[0], operands[3]))
   && (peep2_reg_dead_p (3, operands[2])
       || rtx_equal_p (operands[2], operands[3]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
{
  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
  operands[4] = change_address (operands[4], QImode, addr);
})
579
;; QI -> SI sign-extend.  Register source: SXTB on arch6, else "#".
;; Memory source: LDRSB only when the address is already [reg, reg];
;; anything else returns "#" and is handled by the splitter above.
(define_insn "thumb1_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
  "TARGET_THUMB1"
{
  rtx addr;

  if (which_alternative == 0 && arm_arch6)
    return "sxtb\\t%0, %1";
  if (which_alternative == 0)
    return "#";

  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    return "ldrsb\\t%0, %1";

  return "#";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 2)
			  (if_then_else (eq_attr "is_arch6" "yes")
					(const_int 4) (const_int 6))])
   (set_attr "type" "extend,load_byte,load_byte")]
)
607
608;;; ??? This should have alternatives for constants.
609;;; ??? This was originally identical to the movdf_insn pattern.
610;;; ??? The 'i' constraint looks funny, but it should always be replaced by
611;;; thumb_reorg with a memory reference.
;; 64-bit move.  Alternatives: reg-reg copies ordered to cope with
;; overlap (cases 0 and 8), small-constant materialization (1-3),
;; LDM/STM (4/5), constant-pool or general loads (6) and stores (7).
;; %Q/%R are the low/high words; %H is the second word of a pair.
(define_insn "*thumb1_movdi_insn"
  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
	(match_operand:DI 1 "general_operand"      "l, I,J,j,>,l,mi,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode))"
  "*
  {
  switch (which_alternative)
    {
    default:
    case 0:
      /* Copy word-by-word in whichever order avoids clobbering a
	 source word that is still needed.  */
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"add\\t%0,  %1,  #0\;add\\t%H0, %H1, #0\";
      return   \"add\\t%H0, %H1, #0\;add\\t%0,  %1,  #0\";
    case 1:
      /* Small positive constant: low word moved, high word zero.  */
      return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
    case 2:
      /* Small negative constant: materialize -n, negate, then
	 sign-extend into the high word.  */
      operands[1] = GEN_INT (- INTVAL (operands[1]));
      return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
    case 3:
      /* MOVW immediate (ARMv8-M baseline); high word is zero.
	 Fixed: was \"movs\\tR0\" with a literal register name, which
	 always zeroed r0 instead of the high word of operand 0.  */
      gcc_assert (TARGET_HAVE_MOVT);
      return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
    case 4:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 5:
      return \"stmia\\t%0, {%1, %H1}\";
    case 6:
      return thumb_load_double_from_address (operands);
    case 7:
      /* Store as two word stores, the second at offset +4.  */
      operands[2] = gen_rtx_MEM (SImode,
			     plus_constant (Pmode, XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 8:
      /* High-reg pair copy; order again chosen to avoid clobbering.  */
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  }"
  [(set_attr "length" "4,4,6,6,2,2,6,4,4")
   (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
)
657
;; 32-bit move.  Includes a pure-code alternative (case 7) that builds
;; a constant byte by byte so no literal pool entry is needed.
;; Fixed: case 7 used \" and \\t string escapes, but this is a {...}
;; C block (copied verbatim into C), not a "*..." template string, so
;; the escapes were invalid C -- plain quotes and \t are required, as
;; case 0 already demonstrates.
(define_insn "*thumb1_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, l, m,*l*h*k")
	(match_operand:SI 1 "general_operand"      "l, I,j,J,K,>,l,i, mi,l,*l*h*k"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SImode)
       || register_operand (operands[1], SImode))"
{
  switch (which_alternative)
    {
      default:
      case 0: return "movs\t%0, %1";
      case 1: return "movs\t%0, %1";
      case 2: return "movw\t%0, %1";
      case 3: return "#";
      case 4: return "#";
      case 5: return "ldmia\t%1, {%0}";
      case 6: return "stmia\t%0, {%1}";
      case 7:
      /* pure-code alternative: build the constant byte by byte,
	 instead of loading it from a constant pool.  */
	if (arm_valid_symbolic_address_p (operands[1]))
	  {
	    output_asm_insn ("movs\t%0, #:upper8_15:%1", operands);
	    output_asm_insn ("lsls\t%0, #8", operands);
	    output_asm_insn ("adds\t%0, #:upper0_7:%1", operands);
	    output_asm_insn ("lsls\t%0, #8", operands);
	    output_asm_insn ("adds\t%0, #:lower8_15:%1", operands);
	    output_asm_insn ("lsls\t%0, #8", operands);
	    output_asm_insn ("adds\t%0, #:lower0_7:%1", operands);
	    return "";
	  }
	else if (GET_CODE (operands[1]) == CONST_INT)
	  {
	    thumb1_gen_const_int_print (operands[0], INTVAL (operands[1]));
	    return "";
	  }

	gcc_unreachable ();

      case 8: return "ldr\t%0, %1";
      case 9: return "str\t%1, %0";
      case 10: return "mov\t%0, %1";
    }
}
  [(set_attr "length" "2,2,4,4,4,2,2,14,2,2,2")
   (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,alu_sreg,load_4,store_4,mov_reg")
   (set_attr "pool_range" "*,*,*,*,*,*,*, *,1018,*,*")
   (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1,t1")
   (set_attr "required_for_purecode" "no,no,no,no,no,no,no,yes,no,no,no")
   (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,clob,nocond,nocond,nocond")])
708
709; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
710; to see if we can load them in fewer instructions or fewer cycles.
711; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
712; thumb1_movdi_insn has a better way to handle them.
;; Split a 64-bit constant move into independent low/high 32-bit moves
;; after reload, unless the constant satisfies 'J' (which
;; thumb1_movdi_insn already handles more cheaply).
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
       (match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
                                  operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);
  "
)
727
;; Materialize a 'J' constant (a small negative value) as a move of
;; its negation followed by a negate.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (neg:SI (match_dup 2)))]
  "
  {
    operands[1] = GEN_INT (- INTVAL (operands[1]));
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  }"
)
740
;; Materialize a 'K' constant (an 8-bit value shifted left) as a move
;; of the 8-bit value followed by a left shift.  Skipped when MOVW can
;; handle the constant directly.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
  "
  {
    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
    unsigned HOST_WIDE_INT mask = 0xff;
    int i;

    /* Find the smallest left shift that frames the value in 8 bits.  */
    for (i = 0; i < 25; i++)
      if ((val & (mask << i)) == val)
        break;

    /* Don't split if the shift is zero.  */
    if (i == 0)
      FAIL;

    operands[1] = GEN_INT (val >> i);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (i);
  }"
)
767
768;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
;; Materialize a 'Pe' constant (256-510) as mov of (n - 255) plus an
;; add of #255.  Skipped when MOVW can load the constant directly.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
  "
  {
    operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (255);
  }"
)
783
;; With the literal pool disabled and no MOVW available, synthesize an
;; arbitrary constant with an instruction sequence (shifts and adds)
;; generated by thumb1_gen_const_int_rtl.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1
   && arm_disable_literal_pool
   && GET_CODE (operands[1]) == CONST_INT
   && !TARGET_HAVE_MOVT
   && !satisfies_constraint_K (operands[1])"
  [(clobber (const_int 0))]
  "
    thumb1_gen_const_int_rtl (operands[0], INTVAL (operands[1]));
    DONE;
  "
)
798
;; 16-bit move.  Loads work around reload occasionally producing an
;; SP-based indexed address (LDRH cannot use SP) by first copying SP
;; into the destination and rebasing the address on it.
(define_insn "*thumb1_movhi_insn"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
	(match_operand:HI 1 "general_operand"       "l,m,l,k*h,*r,I,n"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], HImode)
       || register_operand (operands[1], HImode))"
  "*
  switch (which_alternative)
    {
    case 0: return \"adds	%0, %1, #0\";
    case 2: return \"strh	%1, %0\";
    case 3: return \"mov	%0, %1\";
    case 4: return \"mov	%0, %1\";
    case 5: return \"movs	%0, %1\";
    case 6: gcc_assert (TARGET_HAVE_MOVT);
	    return \"movw	%0, %L1\";
    default: gcc_unreachable ();
    case 1:
      /* The stack pointer can end up being taken as an index register.
          Catch this case here and deal with it.  */
      if (GET_CODE (XEXP (operands[1], 0)) == PLUS
	  && REG_P (XEXP (XEXP (operands[1], 0), 0))
	  && REGNO    (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
        {
	  rtx ops[2];
          ops[0] = operands[0];
          ops[1] = XEXP (XEXP (operands[1], 0), 0);

          output_asm_insn (\"mov	%0, %1\", ops);

          XEXP (XEXP (operands[1], 0), 0) = operands[0];

	}
      return \"ldrh	%0, %1\";
    }"
  [(set_attr "length" "2,4,2,2,2,2,4")
   (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm")
   (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])
838
;; HI store with a DI scratch available.  Currently only handles the
;; easy case (valid address, low source register) by emitting a plain
;; movhi; anything else aborts (see the XXX below).
(define_expand "thumb_movhi_clobber"
  [(set (match_operand:HI     0 "memory_operand")
	(match_operand:HI     1 "register_operand"))
   (clobber (match_operand:DI 2 "register_operand"))]
  "TARGET_THUMB1"
  "
  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
      && REGNO (operands[1]) <= LAST_LO_REGNUM)
    {
      emit_insn (gen_movhi (operands[0], operands[1]));
      DONE;
    }
  /* XXX Fixme, need to handle other cases here as well.  */
  gcc_unreachable ();
  "
)
855
;; 8-bit move: ADDS #0 for low-reg copies (MOVS would also clobber
;; flags identically; "conds" is clob either way), LDRB/STRB for
;; memory, MOV for high-reg transfers, MOVS for immediates.
(define_insn "*thumb1_movqi_insn"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
	(match_operand:QI 1 "general_operand"       "l,m,l,k*h,*r,I"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], QImode)
       || register_operand (operands[1], QImode))"
  "@
   adds\\t%0, %1, #0
   ldrb\\t%0, %1
   strb\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1
   movs\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm")
   (set_attr "pool_range" "*,32,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
873
;; Half-precision float move.  Loads distinguish constant-pool entries
;; (LABEL_REF-based addresses, loaded with word LDR) from ordinary
;; memory (LDRH).  FP constants (case 2) are materialized from their
;; 16-bit target representation as high byte, shift, add low byte.
(define_insn "*thumb1_movhf"
  [(set (match_operand:HF     0 "nonimmediate_operand" "=l,l,l,m,*r,*h")
	(match_operand:HF     1 "general_operand"      "l, m,F,l,*h,*r"))]
  "TARGET_THUMB1
   && (	  s_register_operand (operands[0], HFmode)
       || s_register_operand (operands[1], HFmode))"
  "*
  switch (which_alternative)
    {
    case 0:
      return \"movs\\t%0, %1\";
    case 1:
      {
	rtx addr;
	gcc_assert (MEM_P (operands[1]));
	addr = XEXP (operands[1], 0);
	if (GET_CODE (addr) == LABEL_REF
	    || (GET_CODE (addr) == CONST
		&& GET_CODE (XEXP (addr, 0)) == PLUS
		&& GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
		&& CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
	  {
	    /* Constant pool entry.  */
	    return \"ldr\\t%0, %1\";
	  }
	return \"ldrh\\t%0, %1\";
      }
    case 2:
    {
      int bits;
      int high;
      rtx ops[3];

      /* Build the 16-bit pattern: high byte, shift by 8, add low byte;
	 skip the high-byte step when it is zero.  */
      bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
			     HFmode);
      ops[0] = operands[0];
      high = (bits >> 8) & 0xff;
      ops[1] = GEN_INT (high);
      ops[2] = GEN_INT (bits & 0xff);
      if (high != 0)
	output_asm_insn (\"movs\\t%0, %1\;lsls\\t%0, #8\;adds\\t%0, %2\", ops);
      else
	output_asm_insn (\"movs\\t%0, %2\", ops);

      return \"\";
    }
    case 3: return \"strh\\t%1, %0\";
    default: return \"mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "2,2,6,2,2,2")
   (set_attr "type" "mov_reg,load_4,mov_reg,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,1018,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond")])
928
929;;; ??? This should have alternatives for constants.
;; SFmode moves; at least one operand must be a register.  Register
;; copies use ADDS #0 or MOV; register-indirect memory ('>') uses
;; LDMIA/STMIA; other memory (including literal-pool 'F' constants,
;; pool range 1018) uses LDR/STR.
(define_insn "*thumb1_movsf_insn"
  [(set (match_operand:SF     0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
	(match_operand:SF     1 "general_operand"      "l, >,l,mF,l,*h,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SFmode)
       || register_operand (operands[1], SFmode))"
  "@
   adds\\t%0, %1, #0
   ldmia\\t%1, {%0}
   stmia\\t%0, {%1}
   ldr\\t%0, %1
   str\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,*,*,1018,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
)
949
950;;; ??? This should have alternatives for constants.
951;;; ??? This was originally identical to the movdi_insn pattern.
952;;; ??? The 'F' constraint looks funny, but it should always be replaced by
953;;; thumb_reorg with a memory reference.
;; DFmode moves, handled as a pair of SImode transfers (%0/%H0 are the
;; low/high words).  For the two-instruction register and store forms,
;; the order of the halves is chosen so the first transfer never
;; overwrites a source register that the second still needs.
(define_insn "*thumb_movdf_insn"
  [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
	(match_operand:DF 1 "general_operand"      "l, >,l,mF,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DFmode)
       || register_operand (operands[1], DFmode))"
  "*
  switch (which_alternative)
    {
    default:
    case 0:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
      return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
    case 1:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 2:
      return \"stmia\\t%0, {%1, %H1}\";
    case 3:
      return thumb_load_double_from_address (operands);
    case 4:
      operands[2] = gen_rtx_MEM (SImode,
				 plus_constant (Pmode,
						XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 5:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "4,2,2,6,4,4")
   (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "pool_range" "*,*,*,1018,*,*")]
)
990
991
992;; Thumb block-move insns
993
;; Block-move helper: copy three words (12 bytes) from [%3] to [%2],
;; leaving both pointers advanced by 12 in operands 0 and 1.  Three
;; scratch low registers carry the data; the actual LDMIA/STMIA pair is
;; emitted by thumb_output_move_mem_multiple.
(define_insn "cpymem12b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
	(mem:SI (plus:SI (match_dup 3) (const_int 8))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 12)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 12)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))
   (clobber (match_scratch:SI 6 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (3, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_12")]
)
1015
;; Block-move helper: copy two words (8 bytes) from [%3] to [%2],
;; leaving both pointers advanced by 8.  Two scratch low registers
;; carry the data; see cpymem12b for the three-word variant.
(define_insn "cpymem8b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 8)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 8)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (2, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_8")]
)
1034
1035
1036;; A pattern to recognize a special situation and optimize for it.
;; On the thumb, zero-extension from memory is preferable to sign-extension
1038;; due to the available addressing modes.  Hence, convert a signed comparison
1039;; with zero into an unsigned comparison with 127 if possible.
(define_expand "cbranchqi4"
  [(set (pc) (if_then_else
	      (match_operator 0 "lt_ge_comparison_operator"
	       [(match_operand:QI 1 "memory_operand")
	        (match_operand:QI 2 "const0_operand")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_THUMB1"
{
  rtx xops[4];
  xops[1] = gen_reg_rtx (SImode);
  /* Load with zero-extension (LDRB) instead of sign-extension.  */
  emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
  xops[2] = GEN_INT (127);
  /* On the zero-extended value: signed x >= 0  <=>  unsigned x <= 127,
     and signed x < 0  <=>  unsigned x > 127.  */
  xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
			    VOIDmode, xops[1], xops[2]);
  xops[3] = operands[3];
  emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
  DONE;
})
1059
1060;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile,
1061;; adapted from cbranchsi4_insn.  Modifying cbranchsi4_insn instead leads to
1062;; code generation difference for ARMv6-M because the minimum length of the
1063;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's
1064;; handling of PC in the length condition.
(define_insn "thumb1_cbz"
  [(set (pc) (if_then_else
	      (match_operator 0 "equality_operator"
	       [(match_operand:SI 1 "s_register_operand" "l")
		(const_int 0)])
	      (label_ref (match_operand 2 "" ""))
	      (pc)))]
  "TARGET_THUMB1 && TARGET_HAVE_CBZ"
{
  /* In range for the 16-bit CB(N)Z encoding: use it directly.  */
  if (get_attr_length (insn) == 2)
    {
      if (GET_CODE (operands[0]) == EQ)
	return "cbz\t%1, %l2";
      else
	return "cbnz\t%1, %l2";
    }
  else
    {
      /* Out of CB(N)Z range: fall back to CMP plus a conditional branch,
	 reusing the flags from a previously recorded compare when they
	 are still valid for these operands.  */
      rtx t = cfun->machine->thumb1_cc_insn;
      /* NOTE(review): operands[2] here is the branch label, yet it is
	 compared against (and stored into) thumb1_cc_op1 below.  This
	 looks copied from cbranchsi4_insn, where operands[2] is the
	 compared value; the effect is merely conservative (the recorded
	 flags never match, so a CMP is always emitted) -- confirm.  */
      if (t != NULL_RTX)
	{
	  if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
	      || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
	    t = NULL_RTX;
	  if (cfun->machine->thumb1_cc_mode == CC_NZmode)
	    {
	      if (!nz_comparison_operator (operands[0], VOIDmode))
		t = NULL_RTX;
	    }
	  else if (cfun->machine->thumb1_cc_mode != CCmode)
	    t = NULL_RTX;
	}
      if (t == NULL_RTX)
	{
	  output_asm_insn ("cmp\t%1, #0", operands);
	  cfun->machine->thumb1_cc_insn = insn;
	  cfun->machine->thumb1_cc_op0 = operands[1];
	  cfun->machine->thumb1_cc_op1 = operands[2];
	  cfun->machine->thumb1_cc_mode = CCmode;
	}
      else
	/* Ensure we emit the right type of condition code on the jump.  */
	XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
					     CC_REGNUM);

      /* Longer distances use an inverted short branch around an
	 unconditional B, or BL for the far-jump case.  */
      switch (get_attr_length (insn))
	{
	case 4:  return "b%d0\t%l2";
	case 6:  return "b%D0\t.LCB%=;b\t%l2\t%@long jump\n.LCB%=:";
	case 8:  return "b%D0\t.LCB%=;bl\t%l2\t%@far jump\n.LCB%=:";
	default: gcc_unreachable ();
	}
    }
}
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 2) (pc)) (const_int 2))
		 (le (minus (match_dup 2) (pc)) (const_int 128)))
	    (const_int 2)
	    (if_then_else
		(and (ge (minus (match_dup 2) (pc)) (const_int -250))
		     (le (minus (match_dup 2) (pc)) (const_int 256)))
		(const_int 4)
		(if_then_else
		    (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
			 (le (minus (match_dup 2) (pc)) (const_int 2048)))
		    (const_int 6)
		    (const_int 8)))))
   (set (attr "type")
	(if_then_else
	    (eq_attr "length" "2")
	    (const_string "branch")
	    (const_string "multiple")))]
)
1144
1145;; Changes to the constraints of this pattern must be propagated to those of
1146;; atomic compare_and_swap splitters in sync.md.  These must be at least as
;; strict as the constraints here, and should aim to be as permissive as
;; possible.
(define_insn "cbranchsi4_insn"
  [(set (pc) (if_then_else
	      (match_operator 0 "arm_comparison_operator"
	       [(match_operand:SI 1 "s_register_operand" "l,l*h")
	        (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_THUMB1"
{
  /* thumb1_cc_insn records the most recent flag-setting compare; if it
     compared the same operands in a compatible CC mode, the CMP below
     can be omitted.  */
  rtx t = cfun->machine->thumb1_cc_insn;
  if (t != NULL_RTX)
    {
      if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
	  || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
	t = NULL_RTX;
      /* CC_NZ flags only serve comparisons that look at N and Z.  */
      if (cfun->machine->thumb1_cc_mode == CC_NZmode)
	{
	  if (!nz_comparison_operator (operands[0], VOIDmode))
	    t = NULL_RTX;
	}
      else if (cfun->machine->thumb1_cc_mode != CCmode)
	t = NULL_RTX;
    }
  if (t == NULL_RTX)
    {
      /* No reusable flags: emit the compare and record it.  */
      output_asm_insn ("cmp\t%1, %2", operands);
      cfun->machine->thumb1_cc_insn = insn;
      cfun->machine->thumb1_cc_op0 = operands[1];
      cfun->machine->thumb1_cc_op1 = operands[2];
      cfun->machine->thumb1_cc_mode = CCmode;
    }
  else
    /* Ensure we emit the right type of condition code on the jump.  */
    XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
					 CC_REGNUM);

  /* Pick the branch sequence by distance: short conditional branch,
     inverted branch around a B, or BL as a far jump.  */
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
}
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1208
1209;; An expander which makes use of the cbranchsi4_scratch insn, but can
1210;; be used safely after RA.
;; Unlike cbranchsi4_scratch itself (whose scratch is a match_scratch),
;; the clobbered register here is a caller-supplied operand, which is
;; what makes this expander usable after register allocation.
(define_expand "cbranchsi4_neg_late"
  [(parallel [
     (set (pc) (if_then_else
		(match_operator 4 "arm_comparison_operator"
		 [(match_operand:SI 1 "s_register_operand")
		  (match_operand:SI 2 "thumb1_cmpneg_operand")])
		(label_ref (match_operand 3 "" ""))
		(pc)))
     (clobber (match_operand:SI 0 "s_register_operand"))
  ])]
  "TARGET_THUMB1"
)
1223
1224;; Changes to the constraints of this pattern must be propagated to those of
1225;; atomic compare_and_swap splitters in sync.md.  These must be at least as
;; strict as the constraints here, and should aim to be as permissive as
;; possible.
;; Compare against a negative constant and branch.  CMP cannot take a
;; negative immediate, so ADDS %0, %1, #%n2 (%n prints the negated
;; value) computes %1 - %2 into a scratch, setting the flags exactly as
;; the comparison would; the result register itself is discarded.
(define_insn "cbranchsi4_scratch"
  [(set (pc) (if_then_else
	      (match_operator 4 "arm_comparison_operator"
	       [(match_operand:SI 1 "s_register_operand" "l,0")
	        (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))
   (clobber (match_scratch:SI 0 "=l,l"))]
  "TARGET_THUMB1"
  "*
  output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);

  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d4\\t%l3\";
    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1263
;; Equality branch against a negated register: CMN %1, %2 (compare
;; negative) sets the flags for %1 + %2, i.e. compares %1 with -%2
;; without needing the negation computed into a register.
(define_insn "*negated_cbranchsi4"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(match_operand:SI 1 "s_register_operand" "l")
	   (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))]
  "TARGET_THUMB1"
  "*
  output_asm_insn (\"cmn\\t%1, %2\", operands);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1299
;; Branch on a single bit of a register.  LSLS by (31 - bitpos) moves
;; the tested bit into bit 31 of a scratch, and the conditional branch
;; is taken on the resulting flags (presumably via the N flag / CC_N
;; condition mapping for single-bit zero_extract -- see
;; arm_select_cc_mode).
(define_insn "*tbit_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
			    (const_int 1)
			    (match_operand:SI 2 "const_int_operand" "i"))
	   (const_int 0)])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 4 "=l"))]
  "TARGET_THUMB1"
  "*
  {
  rtx op[3];
  op[0] = operands[4];
  op[1] = operands[1];
  op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));

  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1344
;; Branch on whether the low %2 bits of a register are all zero.
;; LSLS by (32 - %2) shifts exactly those bits to the top of a scratch,
;; so the Z flag is set iff the tested field is zero.
(define_insn "*tlobits_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
			    (match_operand:SI 2 "const_int_operand" "i")
			    (const_int 0))
	   (const_int 0)])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 4 "=l"))]
  "TARGET_THUMB1"
  "*
  {
  rtx op[3];
  op[0] = operands[4];
  op[1] = operands[1];
  op[2] = GEN_INT (32 - INTVAL (operands[2]));

  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1389
;; Branch on (reg AND reg) == 0 / != 0, using TST so no result register
;; is needed.
(define_insn "*tstsi3_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 3 "equality_operator"
	  [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
		   (match_operand:SI 1 "s_register_operand" "l"))
	   (const_int 0)])
	 (label_ref (match_operand 2 "" ""))
	 (pc)))]
  "TARGET_THUMB1"
  "*
  {
  output_asm_insn (\"tst\\t%0, %1\", operands);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d3\\t%l2\";
    case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 2) (pc)) (const_int -250))
	         (le (minus (match_dup 2) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
		     (le (minus (match_dup 2) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1427
;; Combined decrement-and-branch: operand 0 receives operands[2] - 1
;; while the branch tests operands[2] against zero.  Since SUBS #1 sets
;; the carry iff operands[2] >= 1, the NE/EQ test is recast as an
;; unsigned GEU/LTU comparison against 1, which the carry flag encodes
;; directly.  Extra alternatives route the result through a lo scratch
;; when the destination is a hi register or memory, since reload cannot
;; handle output reloads on a jump insn.
(define_insn "*cbranchne_decr1"
  [(set (pc)
	(if_then_else (match_operator 3 "equality_operator"
		       [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
		        (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))
   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
	(plus:SI (match_dup 2) (const_int -1)))
   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
  "TARGET_THUMB1"
  "*
   {
     rtx cond[2];
     cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
				? GEU : LTU),
			       VOIDmode, operands[2], const1_rtx);
     cond[1] = operands[4];

     if (which_alternative == 0)
       output_asm_insn (\"subs\\t%0, %2, #1\", operands);
     else if (which_alternative == 1)
       {
	 /* We must provide an alternative for a hi reg because reload
	    cannot handle output reloads on a jump instruction, but we
	    can't subtract into that.  Fortunately a mov from lo to hi
	    does not clobber the condition codes.  */
	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
	 output_asm_insn (\"mov\\t%0, %1\", operands);
       }
     else
       {
	 /* Similarly, but the target is memory.  */
	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
	 output_asm_insn (\"str\\t%1, %0\", operands);
       }

     switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
       {
	 case 4:
	   output_asm_insn (\"b%d0\\t%l1\", cond);
	   return \"\";
	 case 6:
	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
	   return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
	 default:
	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
	   return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (ior (and (eq (symbol_ref ("which_alternative"))
	                  (const_int 0))
		      (eq_attr "length" "8"))
		 (eq_attr "length" "10"))
	    (const_string "yes")
            (const_string "no")))
   (set_attr_alternative "length"
      [
       ;; Alternative 0
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8)))
       ;; Alternative 1
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))
       ;; Alternative 2
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))
       ;; Alternative 3
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))])
   (set_attr "type" "multiple")]
)
1531
;; Combined add-and-branch: operand 0 receives operands[2] + operands[3]
;; and the branch tests that sum against zero.  Restricted to EQ/NE/GE/LT,
;; the conditions derivable from the N and Z flags that ADDS/SUBS set.
;; A negative constant addend is emitted as SUBS of its negation (%n2).
;; Hi-register and memory destinations (alternatives 2-5) compute into a
;; lo register first and then MOV/STR the result, because reload cannot
;; handle output reloads on a jump insn; those alternatives cost 2 extra
;; bytes, accounted for in the length calculation below.
(define_insn "*addsi3_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 4 "arm_comparison_operator"
	  [(plus:SI
	    (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
	    (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
	   (const_int 0)])
	 (label_ref (match_operand 5 "" ""))
	 (pc)))
   (set
    (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
    (plus:SI (match_dup 2) (match_dup 3)))
   (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
  "TARGET_THUMB1
   && (GET_CODE (operands[4]) == EQ
       || GET_CODE (operands[4]) == NE
       || GET_CODE (operands[4]) == GE
       || GET_CODE (operands[4]) == LT)"
  "*
   {
     rtx cond[3];

     cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
     cond[1] = operands[2];
     cond[2] = operands[3];

     if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
       output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
     else
       output_asm_insn (\"adds\\t%0, %1, %2\", cond);

     if (which_alternative >= 2
	 && which_alternative < 4)
       output_asm_insn (\"mov\\t%0, %1\", operands);
     else if (which_alternative >= 4)
       output_asm_insn (\"str\\t%1, %0\", operands);

     switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
       {
	 case 4:
	   return \"b%d4\\t%l5\";
	 case 6:
	   return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
	 default:
	   return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (ior (and (lt (symbol_ref ("which_alternative"))
	                  (const_int 2))
		      (eq_attr "length" "8"))
		 (eq_attr "length" "10"))
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
     (if_then_else
       (lt (symbol_ref ("which_alternative"))
		       (const_int 2))
       (if_then_else
	 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
	      (le (minus (match_dup 5) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
		(le (minus (match_dup 5) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8)))
       (if_then_else
	 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
	      (le (minus (match_dup 5) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
		(le (minus (match_dup 5) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))))
   (set_attr "type" "multiple")]
)
1613
;; As *addsi3_cbranch but the sum itself is dead: only the flags are
;; needed.  Where possible no scratch is used -- CMP with the negated
;; constant (alt 0) or CMN with a register (alt 1); otherwise ADDS/SUBS
;; into a scratch (alt 2) or into the input register itself (alt 3).
;; Restricted to EQ/NE/GE/LT, the conditions derivable from N and Z.
(define_insn "*addsi3_cbranch_scratch"
  [(set (pc)
	(if_then_else
	 (match_operator 3 "arm_comparison_operator"
	  [(plus:SI
	    (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
	    (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
	   (const_int 0)])
	 (label_ref (match_operand 4 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 0 "=X,X,l,l"))]
  "TARGET_THUMB1
   && (GET_CODE (operands[3]) == EQ
       || GET_CODE (operands[3]) == NE
       || GET_CODE (operands[3]) == GE
       || GET_CODE (operands[3]) == LT)"
  "*
   {
     switch (which_alternative)
       {
       case 0:
	 output_asm_insn (\"cmp\t%1, #%n2\", operands);
	 break;
       case 1:
	 output_asm_insn (\"cmn\t%1, %2\", operands);
	 break;
       case 2:
	 if (INTVAL (operands[2]) < 0)
	   output_asm_insn (\"subs\t%0, %1, %2\", operands);
	 else
	   output_asm_insn (\"adds\t%0, %1, %2\", operands);
	 break;
       case 3:
	 if (INTVAL (operands[2]) < 0)
	   output_asm_insn (\"subs\t%0, %0, %2\", operands);
	 else
	   output_asm_insn (\"adds\t%0, %0, %2\", operands);
	 break;
       }

     switch (get_attr_length (insn))
       {
	 case 4:
	   return \"b%d3\\t%l4\";
	 case 6:
	   return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
	 default:
	   return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8))))
   (set_attr "type" "multiple")]
)
1682
;; Compare a DImode value with zero by ORing its two halves (%Q0 low,
;; %R0 high) into a scratch: ORRS sets Z iff the whole 64-bit value is
;; zero.  Only the Z flag is meaningful, hence the CC_Z mode.
(define_insn "*thumb_cmpdi_zero"
  [(set (reg:CC_Z CC_REGNUM)
	(compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
		      (const_int 0)))
   (clobber (match_scratch:SI 1 "=l"))]
  "TARGET_THUMB1"
  "orrs\\t%1, %Q0, %R0"
  [(set_attr "conds" "set")
   (set_attr "length" "2")
   (set_attr "type" "logics_reg")]
)
1694
;; Expand (reg == 0) into an SImode 0/1 result; a fresh pseudo is
;; supplied for the clobber required by *cstoresi_eq0_thumb1_insn.
(define_expand "cstoresi_eq0_thumb1"
  [(parallel
    [(set (match_operand:SI 0 "s_register_operand")
	  (eq:SI (match_operand:SI 1 "s_register_operand")
		 (const_int 0)))
     (clobber (match_dup:SI 2))])]
  "TARGET_THUMB1"
  "operands[2] = gen_reg_rtx (SImode);"
)
1704
;; Expand (reg != 0) into an SImode 0/1 result; a fresh pseudo is
;; supplied for the clobber required by *cstoresi_ne0_thumb1_insn.
(define_expand "cstoresi_ne0_thumb1"
  [(parallel
    [(set (match_operand:SI 0 "s_register_operand")
	  (ne:SI (match_operand:SI 1 "s_register_operand")
		 (const_int 0)))
     (clobber (match_dup:SI 2))])]
  "TARGET_THUMB1"
  "operands[2] = gen_reg_rtx (SImode);"
)
1714
;; %0 = (%1 == 0) via the carry flag: RSBS computes 0 - %1, setting
;; carry (no borrow) exactly when %1 == 0; the following ADCS then
;; yields (-%1) + %1 + carry = carry, i.e. 1 if %1 was zero, else 0.
;; The second alternative uses a separate scratch when %0 and %1 tie.
(define_insn "*cstoresi_eq0_thumb1_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
	(eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
	       (const_int 0)))
   (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
  "TARGET_THUMB1"
  "@
   rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
   rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1727
;; %0 = (%1 != 0) via the carry flag: SUBS %2 = %1 - 1 sets carry
;; exactly when %1 >= 1 (unsigned), i.e. %1 != 0; SBCS then yields
;; %1 - (%1 - 1) - (1 - carry) = carry.
(define_insn "*cstoresi_ne0_thumb1_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(ne:SI (match_operand:SI 1 "s_register_operand" "0")
	       (const_int 0)))
   (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
  "TARGET_THUMB1"
  "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
  [(set_attr "length" "4")]
)
1737
1738;; Used as part of the expansion of thumb ltu and gtu sequences
;; %0 = -(%1 <u %2): CMP clears carry (borrow) exactly when %1 <u %2,
;; and SBCS %0, %0, %0 computes %0 - %0 - !carry, giving -1 on borrow
;; and 0 otherwise -- i.e. the negated unsigned-less-than result.
(define_insn "cstoresi_nltu_thumb1"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
        (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
			(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
  "TARGET_THUMB1"
  "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1748
;; %0 = (%1 <u %2), split into cstoresi_nltu_thumb1 (which produces the
;; negated result, 0 or -1, in a fresh pseudo) followed by a negation.
(define_insn_and_split "cstoresi_ltu_thumb1"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
        (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
		(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1"
  [(set (match_dup 3)
	(neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
   (set (match_dup 0) (neg:SI (match_dup 3)))]
  "operands[3] = gen_reg_rtx (SImode);"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1763
1764;; Used as part of the expansion of thumb les sequence.
;; %0 = %1 + %2 + (%3 >=u %4): CMP sets the carry flag exactly when
;; %3 >=u %4 (no borrow), and ADCS folds that carry into the sum.
(define_insn "thumb1_addsi3_addgeu"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
        (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
			  (match_operand:SI 2 "s_register_operand" "l"))
		 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
			 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
  "TARGET_THUMB1"
  "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1776
1777
;; Unconditional jump: a 16-bit B when the target is within its
;; [-2044, 2048] range, otherwise BL used purely for its greater reach
;; (the "far jump" case, flagged by the far_jump attribute).
(define_insn "*thumb_jump"
  [(set (pc)
	(label_ref (match_operand 0 "" "")))]
  "TARGET_THUMB1"
  "*
  if (get_attr_length (insn) == 2)
    return \"b\\t%l0\";
  return \"bl\\t%l0\\t%@ far jump\";
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "4")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
		 (le (minus (match_dup 0) (pc)) (const_int 2048)))
  	    (const_int 2)
	    (const_int 4)))
   (set_attr "type" "branch")]
)
1800
;; Indirect call through a register using BLX, which is available
;; from ARMv5T (arm_arch5t).  Not used for sibling calls.
(define_insn "*call_reg_thumb1_v5"
  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && arm_arch5t && !SIBLING_CALL_P (insn)"
  "blx\\t%0"
  [(set_attr "length" "2")
   (set_attr "type" "call")]
)
1811
;; CMSE (Armv8-M Security Extensions) non-secure indirect call: the
;; target address is pinned in r4 and the transition is performed by
;; the library helper __gnu_cmse_nonsecure_call.  Only enabled with
;; -mcmse (use_cmse); not used for sibling calls.
(define_insn "*nonsecure_call_reg_thumb1_v5"
  [(call (unspec:SI [(mem:SI (reg:SI R4_REGNUM))]
		    UNSPEC_NONSECURE_MEM)
	 (match_operand 0 "" ""))
   (use (match_operand 1 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)"
  "bl\\t__gnu_cmse_nonsecure_call"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1823
;; Indirect call through a register for pre-ARMv5T cores, which lack
;; BLX.  Without caller interworking the call goes through a
;; per-register helper emitted by thumb_call_via_reg; with caller
;; interworking it goes through one of the __interwork_*_call_via_rN
;; veneers (the r7/r11 variants are selected by which register holds
;; the frame pointer; NOTE(review): the operands[1] == const0_rtx
;; test appears to distinguish calls with no stack arguments --
;; confirm against thumb_call_via_reg).
(define_insn "*call_reg_thumb1"
  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && !arm_arch5t && !SIBLING_CALL_P (insn)"
  "*
  {
    if (!TARGET_CALLER_INTERWORKING)
      return thumb_call_via_reg (operands[0]);
    else if (operands[1] == const0_rtx)
      return \"bl\\t%__interwork_call_via_%0\";
    else if (frame_pointer_needed)
      return \"bl\\t%__interwork_r7_call_via_%0\";
    else
      return \"bl\\t%__interwork_r11_call_via_%0\";
  }"
  [(set_attr "type" "call")]
)
1843
;; Value-returning indirect call through a register using BLX
;; (ARMv5T and later).  Operand 0 receives the call's result.
(define_insn "*call_value_reg_thumb1_v5"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && arm_arch5t"
  "blx\\t%1"
  [(set_attr "length" "2")
   (set_attr "type" "call")]
)
1855
;; Value-returning CMSE non-secure call: as *nonsecure_call_reg_thumb1_v5
;; above, but operand 0 receives the call's result.  The target
;; address is pinned in r4 and the call goes through the
;; __gnu_cmse_nonsecure_call library helper.
(define_insn "*nonsecure_call_value_reg_thumb1_v5"
  [(set (match_operand 0 "" "")
	(call (unspec:SI
	       [(mem:SI (reg:SI R4_REGNUM))]
	       UNSPEC_NONSECURE_MEM)
	      (match_operand 1 "" "")))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && use_cmse"
  "bl\\t__gnu_cmse_nonsecure_call"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1869
;; Value-returning indirect call for pre-ARMv5T cores (no BLX);
;; mirrors *call_reg_thumb1 above with operand numbers shifted by one
;; to make room for the result in operand 0.  Calls go through
;; thumb_call_via_reg helpers, or interworking veneers when caller
;; interworking is enabled.
(define_insn "*call_value_reg_thumb1"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && !arm_arch5t"
  "*
  {
    if (!TARGET_CALLER_INTERWORKING)
      return thumb_call_via_reg (operands[1]);
    else if (operands[2] == const0_rtx)
      return \"bl\\t%__interwork_call_via_%1\";
    else if (frame_pointer_needed)
      return \"bl\\t%__interwork_r7_call_via_%1\";
    else
      return \"bl\\t%__interwork_r11_call_via_%1\";
  }"
  [(set_attr "type" "call")]
)
1890
;; Direct call to a symbol via BL.  Only matches when the target is a
;; SYMBOL_REF that is not subject to long-call treatment
;; (arm_is_long_call_p); long calls are handled elsewhere.
(define_insn "*call_insn"
  [(call (mem:SI (match_operand:SI 0 "" ""))
	 (match_operand:SI 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1
   && GET_CODE (operands[0]) == SYMBOL_REF
   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
  "bl\\t%a0"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1903
;; Value-returning direct call to a symbol via BL; as *call_insn
;; above, with the result in operand 0.  Only for SYMBOL_REF targets
;; that do not need long-call treatment.
(define_insn "*call_value_insn"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand 1 "" ""))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1
   && GET_CODE (operands[1]) == SYMBOL_REF
   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
  "bl\\t%a1"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1917
;; Expander for a PIC switch-table dispatch.
;;   operand 0: case index (register)
;;   operand 1: upper bound of the index range
;;   operand 2: jump-table label
;;   operand 3: default (out-of-range) label
;; Emits a GTU bounds-check branch to the default label, copies the
;; index into r0 (where thumb1_casesi_dispatch expects it), and then
;; emits the dispatch insn.
(define_expand "thumb1_casesi_internal_pic"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "thumb1_cmp_operand")
   (match_operand 2 "" "")
   (match_operand 3 "" "")]
  "TARGET_THUMB1"
  {
    rtx reg0;
    rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
    emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
				    operands[3]));
    reg0 = gen_rtx_REG (SImode, 0);
    emit_move_insn (reg0, operands[0]);
    emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
    DONE;
  }
)
1935
;; The actual jump-table dispatch: takes the case index in r0 and the
;; table label as operand 0, and expands to whatever sequence
;; thumb1_output_casesi produces.  ip and lr are clobbered by that
;; sequence.
(define_insn "thumb1_casesi_dispatch"
  [(parallel [(set (pc) (unspec [(reg:SI 0)
				 (label_ref (match_operand 0 "" ""))
;;				 (label_ref (match_operand 1 "" ""))
]
			 UNSPEC_THUMB1_CASESI))
	      (clobber (reg:SI IP_REGNUM))
              (clobber (reg:SI LR_REGNUM))])]
  "TARGET_THUMB1"
  "* return thumb1_output_casesi(operands);"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1949
;; Indirect jump through a register.  NB: never uses BX, so no
;; ARM/Thumb state change can occur here -- the target must already
;; be Thumb code.
(define_insn "*thumb1_indirect_jump"
  [(set (pc)
	(match_operand:SI 0 "register_operand" "l*r"))]
  "TARGET_THUMB1"
  "mov\\tpc, %0"
  [(set_attr "conds" "clob")
   (set_attr "length" "2")
   (set_attr "type" "branch")]
)
1960
1961
;; Placeholder for the ARM->Thumb interworking entry sequence at the
;; start of a function; the actual assembly is produced by
;; thumb1_output_interwork.
(define_insn "prologue_thumb1_interwork"
  [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
  "TARGET_THUMB1"
  "* return thumb1_output_interwork ();"
  [(set_attr "length" "8")
   (set_attr "type" "multiple")]
)
1969
;; Function epilogue, emitted as a single insn whose assembly comes
;; from thumb1_unexpanded_epilogue.
(define_insn "*epilogue_insns"
  [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
  "TARGET_THUMB1"
  "*
    return thumb1_unexpanded_epilogue ();
  "
  ; Length is absolute worst case, when using CMSE and if this is an entry
  ; function an extra 4 (MSR) bytes will be added.
  [(set (attr "length")
	(if_then_else
	 (match_test "IS_CMSE_ENTRY (arm_current_func_type ())")
	 (const_int 48)
	 (const_int 44)))
   (set_attr "type" "block")
   ;; We don't clobber the conditions, but the potential length of this
   ;; operation is sufficient to make conditionalizing the sequence
   ;; unlikely to be profitable.
   (set_attr "conds" "clob")]
)
1989
;; Miscellaneous Thumb patterns

;; Standard tablejump expander.  For PIC the jump-table entries are
;; relative to the table's label, so add the label address to the
;; index before jumping.
(define_expand "tablejump"
  [(parallel [(set (pc) (match_operand:SI 0 "register_operand"))
	      (use (label_ref (match_operand 1 "" "")))])]
  "TARGET_THUMB1"
  "
  if (flag_pic)
    {
      /* Hopefully, CSE will eliminate this copy.  */
      rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
      rtx reg2 = gen_reg_rtx (SImode);

      emit_insn (gen_addsi3 (reg2, operands[0], reg1));
      operands[0] = reg2;
    }
  "
)
2007
;; Copy the program counter into a low register.
(define_insn "*thumb1_movpc_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(reg:SI PC_REGNUM))]
  "TARGET_THUMB1"
  "mov\\t%0, pc"
  [(set_attr "length" "2")
   (set_attr "conds"  "nocond")
   (set_attr "type"   "mov_reg")]
)
2017
;; Jump-table dispatch through a register.  NB: never uses BX, so no
;; ARM/Thumb state change is possible; the "use" of the table label
;; keeps the jump table alive.
(define_insn "*thumb1_tablejump"
  [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
   (use (label_ref (match_operand 1 "" "")))]
  "TARGET_THUMB1"
  "mov\\t%|pc, %0"
  [(set_attr "length" "2")
   (set_attr "type" "branch")]
)
2027
;; Exception-handler return: operand 0 holds the new return address.
;; Kept as a placeholder until after reload (so the scratch in
;; operand 1 is a real register), then split into the sequence
;; generated by thumb_set_return_address, which stores the new
;; address where the epilogue will pick it up.
(define_insn_and_split "thumb_eh_return"
  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
		    VUNSPEC_EH_RETURN)
   (clobber (match_scratch:SI 1 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "
  {
    thumb_set_return_address (operands[0], operands[1]);
    DONE;
  }"
  [(set_attr "type" "mov_reg")]
)
2043
;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
;; canary value does not live beyond the end of this sequence.
;; Stack-protector check: loads the guard value through the pointer
;; in %2 and the saved canary from memory operand %1, XORs them into
;; %0 (zero iff they match), then immediately zeroes %2 so the live
;; canary value does not linger in a register.
(define_insn "thumb1_stack_protect_test_insn"
  [(set (match_operand:SI 0 "register_operand" "=&l")
	(unspec:SI [(match_operand:SI 1 "memory_operand" "m")
		    (mem:SI (match_operand:SI 2 "register_operand" "+l"))]
	 UNSPEC_SP_TEST))
   (clobber (match_dup 2))]
  "TARGET_THUMB1"
  "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;movs\t%2, #0"
  [(set_attr "length" "10")
   (set_attr "conds" "clob")
   (set_attr "type" "multiple")]
)
2058
2059