1;;- Machine description for ARM for GNU compiler
2;;  Copyright (C) 1991-2021 Free Software Foundation, Inc.
3;;  Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4;;  and Martin Simmons (@harleqn.co.uk).
5;;  More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7;; This file is part of GCC.
8
9;; GCC is free software; you can redistribute it and/or modify it
10;; under the terms of the GNU General Public License as published
11;; by the Free Software Foundation; either version 3, or (at your
12;; option) any later version.
13
14;; GCC is distributed in the hope that it will be useful, but WITHOUT
15;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
17;; License for more details.
18
19;; You should have received a copy of the GNU General Public License
20;; along with GCC; see the file COPYING3.  If not see
21;; <http://www.gnu.org/licenses/>.
22
;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25
26;;---------------------------------------------------------------------------
27;; Constants
28
29;; Register numbers -- All machine registers should be defined here
30(define_constants
31  [(R0_REGNUM         0)	; First CORE register
32   (R1_REGNUM	      1)	; Second CORE register
33   (R4_REGNUM	      4)	; Fifth CORE register
34   (FDPIC_REGNUM      9)	; FDPIC register
35   (IP_REGNUM	     12)	; Scratch register
36   (SP_REGNUM	     13)	; Stack pointer
37   (LR_REGNUM        14)	; Return address register
38   (PC_REGNUM	     15)	; Program counter
39   (LAST_ARM_REGNUM  15)	;
40   (CC_REGNUM       100)	; Condition code pseudo register
41   (VFPCC_REGNUM    101)	; VFP Condition code pseudo register
42   (APSRQ_REGNUM    104)	; Q bit pseudo register
43   (APSRGE_REGNUM   105)	; GE bits pseudo register
44   (VPR_REGNUM      106)	; Vector Predication Register - MVE register.
45  ]
46)
47;; 3rd operand to select_dominance_cc_mode
48(define_constants
49  [(DOM_CC_X_AND_Y  0)
50   (DOM_CC_NX_OR_Y  1)
51   (DOM_CC_X_OR_Y   2)
52  ]
53)
54;; conditional compare combination
55(define_constants
56  [(CMP_CMP 0)
57   (CMN_CMP 1)
58   (CMP_CMN 2)
59   (CMN_CMN 3)
60   (NUM_OF_COND_CMP 4)
61  ]
62)
63
64
65;;---------------------------------------------------------------------------
66;; Attributes
67
68;; Processor type.  This is created automatically from arm-cores.def.
69(include "arm-tune.md")
70
71;; Instruction classification types
72(include "types.md")
73
74; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
75; generating ARM code.  This is used to control the length of some insn
76; patterns that share the same RTL in both ARM and Thumb code.
77(define_attr "is_thumb" "yes,no"
78  (const (if_then_else (symbol_ref "TARGET_THUMB")
79		       (const_string "yes") (const_string "no"))))
80
; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
82(define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
83
84; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
85(define_attr "is_thumb1" "yes,no"
86  (const (if_then_else (symbol_ref "TARGET_THUMB1")
87		       (const_string "yes") (const_string "no"))))
88
89; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
90; The arm_restrict_it flag enables the "short IT" feature which
91; restricts IT blocks to a single 16-bit instruction.
92; This attribute should only be used on 16-bit Thumb-2 instructions
93; which may be predicated (the "predicable" attribute must be set).
94(define_attr "predicable_short_it" "no,yes" (const_string "no"))
95
96; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
; This attribute should only be used on instructions whose expansion may
; emit an IT block that is not a short IT.
99(define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
100
101; Mark an instruction sequence as the required way of loading a
102; constant when -mpure-code is enabled (which implies
103; arm_disable_literal_pool)
104(define_attr "required_for_purecode" "no,yes" (const_string "no"))
105
106;; Operand number of an input operand that is shifted.  Zero if the
107;; given instruction does not shift one of its input operands.
108(define_attr "shift" "" (const_int 0))
109
110;; [For compatibility with AArch64 in pipeline models]
111;; Attribute that specifies whether or not the instruction touches fp
112;; registers.
113(define_attr "fp" "no,yes" (const_string "no"))
114
115; Floating Point Unit.  If we only have floating point emulation, then there
116; is no point in scheduling the floating point insns.  (Well, for best
; performance we should try to group them together).
118(define_attr "fpu" "none,vfp"
119  (const (symbol_ref "arm_fpu_attr")))
120
121; Predicated means that the insn form is conditionally executed based on a
122; predicate.  We default to 'no' because no Thumb patterns match this rule
123; and not all ARM insns do.
124(define_attr "predicated" "yes,no" (const_string "no"))
125
126; LENGTH of an instruction (in bytes)
127(define_attr "length" ""
128  (const_int 4))
129
130; The architecture which supports the instruction (or alternative).
131; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
132; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode.  "v6"
; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
134; arm_arch6.  "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
135; Baseline.  "fix_vlldm" is for fixing the v8-m/v8.1-m VLLDM erratum.
136; This attribute is used to compute attribute "enabled",
137; use type "any" to enable an alternative in all cases.
138(define_attr "arch" "any, a, t, 32, t1, t2, v6,nov6, v6t2, \
139		     v8mb, fix_vlldm, iwmmxt, iwmmxt2, armv6_or_vfpv3, \
140		     neon, mve"
141  (const_string "any"))
142
143(define_attr "arch_enabled" "no,yes"
144  (cond [(eq_attr "arch" "any")
145	 (const_string "yes")
146
147	 (and (eq_attr "arch" "a")
148	      (match_test "TARGET_ARM"))
149	 (const_string "yes")
150
151	 (and (eq_attr "arch" "t")
152	      (match_test "TARGET_THUMB"))
153	 (const_string "yes")
154
155	 (and (eq_attr "arch" "t1")
156	      (match_test "TARGET_THUMB1"))
157	 (const_string "yes")
158
159	 (and (eq_attr "arch" "t2")
160	      (match_test "TARGET_THUMB2"))
161	 (const_string "yes")
162
163	 (and (eq_attr "arch" "32")
164	      (match_test "TARGET_32BIT"))
165	 (const_string "yes")
166
167	 (and (eq_attr "arch" "v6")
168	      (match_test "TARGET_32BIT && arm_arch6"))
169	 (const_string "yes")
170
171	 (and (eq_attr "arch" "nov6")
172	      (match_test "TARGET_32BIT && !arm_arch6"))
173	 (const_string "yes")
174
175	 (and (eq_attr "arch" "v6t2")
176	      (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
177	 (const_string "yes")
178
179	 (and (eq_attr "arch" "v8mb")
180	      (match_test "TARGET_THUMB1 && arm_arch8"))
181	 (const_string "yes")
182
183	 (and (eq_attr "arch" "fix_vlldm")
184	      (match_test "fix_vlldm"))
185	 (const_string "yes")
186
187	 (and (eq_attr "arch" "iwmmxt2")
188	      (match_test "TARGET_REALLY_IWMMXT2"))
189	 (const_string "yes")
190
191	 (and (eq_attr "arch" "armv6_or_vfpv3")
192	      (match_test "arm_arch6 || TARGET_VFP3"))
193	 (const_string "yes")
194
195	 (and (eq_attr "arch" "neon")
196	      (match_test "TARGET_NEON"))
197	 (const_string "yes")
198
199	 (and (eq_attr "arch" "mve")
200	      (match_test "TARGET_HAVE_MVE"))
201	 (const_string "yes")
202	]
203
204	(const_string "no")))
205
206(define_attr "opt" "any,speed,size"
207  (const_string "any"))
208
209(define_attr "opt_enabled" "no,yes"
210  (cond [(eq_attr "opt" "any")
211         (const_string "yes")
212
213	 (and (eq_attr "opt" "speed")
214	      (match_test "optimize_function_for_speed_p (cfun)"))
215	 (const_string "yes")
216
217	 (and (eq_attr "opt" "size")
218	      (match_test "optimize_function_for_size_p (cfun)"))
219	 (const_string "yes")]
220	(const_string "no")))
221
222(define_attr "use_literal_pool" "no,yes"
223   (cond [(and (eq_attr "type" "f_loads,f_loadd")
224	       (match_test "CONSTANT_P (operands[1])"))
225	  (const_string "yes")]
226	 (const_string "no")))
227
228; Enable all alternatives that are both arch_enabled and insn_enabled.
; FIXME: opt_enabled has been temporarily removed until we have
; an attribute that allows the use of such alternatives.
231; This depends on caching of speed_p, size_p on a per
232; alternative basis. The problem is that the enabled attribute
233; cannot depend on any state that is not cached or is not constant
234; for a compilation unit. We probably need a generic "hot/cold"
235; alternative which if implemented can help with this. We disable this
236; until such a time as this is implemented and / or the improvements or
237; regressions with removing this attribute are double checked.
238; See ashldi3_neon and <shift>di3_neon in neon.md.
239
240 (define_attr "enabled" "no,yes"
241   (cond [(and (eq_attr "predicable_short_it" "no")
242	       (and (eq_attr "predicated" "yes")
243	            (match_test "arm_restrict_it")))
244	  (const_string "no")
245
246	  (and (eq_attr "enabled_for_short_it" "no")
247	       (match_test "arm_restrict_it"))
248	  (const_string "no")
249
250	  (and (eq_attr "required_for_purecode" "yes")
251	       (not (match_test "arm_disable_literal_pool")))
252	  (const_string "no")
253
254	  (eq_attr "arch_enabled" "no")
255	  (const_string "no")]
256	 (const_string "yes")))
257
258; POOL_RANGE is how far away from a constant pool entry that this insn
259; can be placed.  If the distance is zero, then this insn will never
260; reference the pool.
261; Note that for Thumb constant pools the PC value is rounded down to the
262; nearest multiple of four.  Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
263; Thumb insns) should be set to <max_range> - 2.
264; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
265; before its address.  It is set to <max_range> - (8 + <data_size>).
266(define_attr "arm_pool_range" "" (const_int 0))
267(define_attr "thumb2_pool_range" "" (const_int 0))
268(define_attr "arm_neg_pool_range" "" (const_int 0))
269(define_attr "thumb2_neg_pool_range" "" (const_int 0))
270
271(define_attr "pool_range" ""
272  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
273	(attr "arm_pool_range")))
274(define_attr "neg_pool_range" ""
275  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
276	(attr "arm_neg_pool_range")))
277
278; An assembler sequence may clobber the condition codes without us knowing.
279; If such an insn references the pool, then we have no way of knowing how,
280; so use the most conservative value for pool_range.
281(define_asm_attributes
282 [(set_attr "conds" "clob")
283  (set_attr "length" "4")
284  (set_attr "pool_range" "250")])
285
286; Load scheduling, set from the arm_ld_sched variable
287; initialized by arm_option_override()
288(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
289
290; condition codes: this one is used by final_prescan_insn to speed up
291; conditionalizing instructions.  It saves having to scan the rtl to see if
292; it uses or alters the condition codes.
293;
294; USE means that the condition codes are used by the insn in the process of
;   outputting code; this means (at present) that we can't use the insn in
296;   inlined branches
297;
298; SET means that the purpose of the insn is to set the condition codes in a
299;   well defined manner.
300;
301; CLOB means that the condition codes are altered in an undefined manner, if
302;   they are altered at all
303;
304; UNCONDITIONAL means the instruction cannot be conditionally executed and
305;   that the instruction does not use or alter the condition codes.
306;
307; NOCOND means that the instruction does not use or alter the condition
;   codes but can be converted into a conditionally executed instruction.
309
310(define_attr "conds" "use,set,clob,unconditional,nocond"
311	(if_then_else
312	 (ior (eq_attr "is_thumb1" "yes")
313	      (eq_attr "type" "call"))
314	 (const_string "clob")
315         (if_then_else
316	  (ior (eq_attr "is_neon_type" "yes")
317	       (eq_attr "is_mve_type" "yes"))
318	  (const_string "unconditional")
319	  (const_string "nocond"))))
320
321; Predicable means that the insn can be conditionally executed based on
322; an automatically added predicate (additional patterns are generated by
323; gen...).  We default to 'no' because no Thumb patterns match this rule
324; and not all ARM patterns do.
325(define_attr "predicable" "no,yes" (const_string "no"))
326
327; Only model the write buffer for ARM6 and ARM7.  Earlier processors don't
328; have one.  Later ones, such as StrongARM, have write-back caches, so don't
329; suffer blockages enough to warrant modelling this (and it can adversely
330; affect the schedule).
331(define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
332
333; WRITE_CONFLICT implies that a read following an unrelated write is likely
334; to stall the processor.  Used with model_wbuf above.
335(define_attr "write_conflict" "no,yes"
336  (if_then_else (eq_attr "type"
337		 "block,call,load_4")
338		(const_string "yes")
339		(const_string "no")))
340
341; Classify the insns into those that take one cycle and those that take more
342; than one on the main cpu execution unit.
343(define_attr "core_cycles" "single,multi"
344  (if_then_else (eq_attr "type"
345    "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
346    alu_shift_imm_lsl_1to4, alu_shift_imm_other, alu_shift_reg, alu_dsp_reg,\
347    alus_ext, alus_imm, alus_sreg,\
348    alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
349    logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
350    logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
351    wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
352    wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
353    wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
354    wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
355    wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
356    wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
357    wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
358    wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
359    wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
360    wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
361    wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
362		(const_string "single")
363	        (const_string "multi")))
364
365;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
366;; distant label.  Only applicable to Thumb code.
367(define_attr "far_jump" "yes,no" (const_string "no"))
368
369
370;; The number of machine instructions this pattern expands to.
371;; Used for Thumb-2 conditional execution.
372(define_attr "ce_count" "" (const_int 1))
373
374;;---------------------------------------------------------------------------
375;; Unspecs
376
377(include "unspecs.md")
378
379;;---------------------------------------------------------------------------
380;; Mode iterators
381
382(include "iterators.md")
383
384;;---------------------------------------------------------------------------
385;; Predicates
386
387(include "predicates.md")
388(include "constraints.md")
389
390;;---------------------------------------------------------------------------
391;; Pipeline descriptions
392
393(define_attr "tune_cortexr4" "yes,no"
394  (const (if_then_else
395	  (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
396	  (const_string "yes")
397	  (const_string "no"))))
398
399;; True if the generic scheduling description should be used.
400
401(define_attr "generic_sched" "yes,no"
402  (const (if_then_else
403          (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
404                                arm926ejs,arm10e,arm1026ejs,arm1136js,\
405                                arm1136jfs,cortexa5,cortexa7,cortexa8,\
406                                cortexa9,cortexa12,cortexa15,cortexa17,\
407                                cortexa53,cortexa57,cortexm4,cortexm7,\
408				exynosm1,marvell_pj4,xgene1")
409	       (eq_attr "tune_cortexr4" "yes"))
410          (const_string "no")
411          (const_string "yes"))))
412
413(define_attr "generic_vfp" "yes,no"
414  (const (if_then_else
415	  (and (eq_attr "fpu" "vfp")
416	       (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
417                                cortexa8,cortexa9,cortexa53,cortexm4,\
418                                cortexm7,marvell_pj4,xgene1")
419	       (eq_attr "tune_cortexr4" "no"))
420	  (const_string "yes")
421	  (const_string "no"))))
422
423(include "marvell-f-iwmmxt.md")
424(include "arm-generic.md")
425(include "arm926ejs.md")
426(include "arm1020e.md")
427(include "arm1026ejs.md")
428(include "arm1136jfs.md")
429(include "fa526.md")
430(include "fa606te.md")
431(include "fa626te.md")
432(include "fmp626.md")
433(include "fa726te.md")
434(include "cortex-a5.md")
435(include "cortex-a7.md")
436(include "cortex-a8.md")
437(include "cortex-a9.md")
438(include "cortex-a15.md")
439(include "cortex-a17.md")
440(include "cortex-a53.md")
441(include "cortex-a57.md")
442(include "cortex-r4.md")
443(include "cortex-r4f.md")
444(include "cortex-m7.md")
445(include "cortex-m4.md")
446(include "cortex-m4-fpu.md")
447(include "exynos-m1.md")
448(include "vfp11.md")
449(include "marvell-pj4.md")
450(include "xgene1.md")
451
452;; define_subst and associated attributes
453
454(define_subst "add_setq"
455  [(set (match_operand:SI 0 "" "")
456        (match_operand:SI 1 "" ""))]
457  ""
458  [(set (match_dup 0)
459        (match_dup 1))
460   (set (reg:CC APSRQ_REGNUM)
461	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
462
463(define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
464(define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
465		   "ARM_Q_BIT_READ")
466
467;;---------------------------------------------------------------------------
468;; Insn patterns
469;;
470;; Addition insns.
471
472;; Note: For DImode insns, there is normally no reason why operands should
473;; not be in the same register, what we don't want is for something being
474;; written to partially overlap something that is an input.
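;; For example (illustrative), if the low word of the destination were
;; assigned the same register as the high word of one of the inputs, writing
;; the low half of the result first would corrupt that input before it had
;; been read; full overlap of the destination with an input is harmless.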
475
476(define_expand "adddi3"
477 [(parallel
478   [(set (match_operand:DI           0 "s_register_operand")
479	  (plus:DI (match_operand:DI 1 "s_register_operand")
480		   (match_operand:DI 2 "reg_or_int_operand")))
481    (clobber (reg:CC CC_REGNUM))])]
482  "TARGET_EITHER"
483  "
484  if (TARGET_THUMB1)
485    {
486      if (!REG_P (operands[2]))
487	operands[2] = force_reg (DImode, operands[2]);
488    }
489  else
490    {
491      rtx lo_result, hi_result, lo_dest, hi_dest;
492      rtx lo_op1, hi_op1, lo_op2, hi_op2;
493      arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
494			      &lo_op2, &hi_op2);
495      lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
496      hi_result = hi_dest = gen_highpart (SImode, operands[0]);
497
498      if (lo_op2 == const0_rtx)
499	{
500	  lo_dest = lo_op1;
501	  if (!arm_add_operand (hi_op2, SImode))
502	    hi_op2 = force_reg (SImode, hi_op2);
503	  /* Assume hi_op2 won't also be zero.  */
504	  emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
505	}
506      else
507	{
508	  if (!arm_add_operand (lo_op2, SImode))
509	    lo_op2 = force_reg (SImode, lo_op2);
510	  if (!arm_not_operand (hi_op2, SImode))
511	    hi_op2 = force_reg (SImode, hi_op2);
512
513	  emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
514	  rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
515				   const0_rtx);
516	  if (hi_op2 == const0_rtx)
517	    emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
518	  else
519	    emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
520	}
521
522      if (lo_result != lo_dest)
523	emit_move_insn (lo_result, lo_dest);
524      if (hi_result != hi_dest)
525	emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
526      DONE;
527    }
528  "
529)
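;; As an illustrative sketch, with the result and first operand in r1:r0
;; (low word in r0) and the second operand in r3:r2, the expansion above
;; normally produces
;;   adds  r0, r0, r2	@ low word, sets the carry flag
;;   adc   r1, r1, r3	@ high word, consumes the carry
;; while the special cases use a plain add for the high word when the low
;; half of a constant operand is zero, or "adc ..., #0" when the high half
;; is zero.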
530
531(define_expand "addvsi4"
532  [(match_operand:SI 0 "s_register_operand")
533   (match_operand:SI 1 "s_register_operand")
534   (match_operand:SI 2 "arm_add_operand")
535   (match_operand 3 "")]
536  "TARGET_32BIT"
537{
538  if (CONST_INT_P (operands[2]))
539    emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
540  else
541    emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
542  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
543
544  DONE;
545})
546
547(define_expand "addvdi4"
548  [(match_operand:DI 0 "s_register_operand")
549   (match_operand:DI 1 "s_register_operand")
550   (match_operand:DI 2 "reg_or_int_operand")
551   (match_operand 3 "")]
552  "TARGET_32BIT"
553{
554  rtx lo_result, hi_result;
555  rtx lo_op1, hi_op1, lo_op2, hi_op2;
556  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
557			  &lo_op2, &hi_op2);
558  lo_result = gen_lowpart (SImode, operands[0]);
559  hi_result = gen_highpart (SImode, operands[0]);
560
561  if (lo_op2 == const0_rtx)
562    {
563      emit_move_insn (lo_result, lo_op1);
564      if (!arm_add_operand (hi_op2, SImode))
565	hi_op2 = force_reg (SImode, hi_op2);
566
567      emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
568    }
569  else
570    {
571      if (!arm_add_operand (lo_op2, SImode))
572	lo_op2 = force_reg (SImode, lo_op2);
573      if (!arm_not_operand (hi_op2, SImode))
574	hi_op2 = force_reg (SImode, hi_op2);
575
576      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
577
578      if (hi_op2 == const0_rtx)
579        emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
580      else if (CONST_INT_P (hi_op2))
581        emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
582      else
583        emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
584
585      arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
586    }
587
588  DONE;
589})
590
591(define_expand "addsi3_cin_vout_reg"
592  [(parallel
593    [(set (match_dup 3)
594	  (compare:CC_V
595	   (plus:DI
596	    (plus:DI (match_dup 4)
597		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
598	    (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
599	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
600				    (match_dup 2)))))
601     (set (match_operand:SI 0 "s_register_operand")
602	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
603		   (match_dup 2)))])]
604  "TARGET_32BIT"
605  {
606    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
607    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
608    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
609    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
610  }
611)
612
613(define_insn "*addsi3_cin_vout_reg_insn"
614  [(set (reg:CC_V CC_REGNUM)
615	(compare:CC_V
616	 (plus:DI
617	  (plus:DI
618	   (match_operand:DI 3 "arm_carry_operation" "")
619	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
620	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
621	 (sign_extend:DI
622	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
623			    (match_dup 1))
624		   (match_dup 2)))))
625   (set (match_operand:SI 0 "s_register_operand" "=l,r")
626	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
627		 (match_dup 2)))]
628  "TARGET_32BIT"
629  "@
630   adcs%?\\t%0, %0, %2
631   adcs%?\\t%0, %1, %2"
632  [(set_attr "type" "alus_sreg")
633   (set_attr "arch" "t2,*")
634   (set_attr "length" "2,4")]
635)
636
637(define_expand "addsi3_cin_vout_imm"
638  [(parallel
639    [(set (match_dup 3)
640	  (compare:CC_V
641	   (plus:DI
642	    (plus:DI (match_dup 4)
643		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
644	    (match_dup 2))
645	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
646				    (match_dup 2)))))
647     (set (match_operand:SI 0 "s_register_operand")
648	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
649		   (match_operand 2 "arm_adcimm_operand")))])]
650  "TARGET_32BIT"
651  {
652    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
656  }
657)
658
659(define_insn "*addsi3_cin_vout_imm_insn"
660  [(set (reg:CC_V CC_REGNUM)
661	(compare:CC_V
662	 (plus:DI
663	  (plus:DI
664	   (match_operand:DI 3 "arm_carry_operation" "")
665	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
666	  (match_operand 2 "arm_adcimm_operand" "I,K"))
667	 (sign_extend:DI
668	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
669			    (match_dup 1))
670		   (match_dup 2)))))
671   (set (match_operand:SI 0 "s_register_operand" "=r,r")
672	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
673		 (match_dup 2)))]
674  "TARGET_32BIT"
675  "@
676   adcs%?\\t%0, %1, %2
677   sbcs%?\\t%0, %1, #%B2"
678  [(set_attr "type" "alus_imm")]
679)
680
681(define_expand "addsi3_cin_vout_0"
682  [(parallel
683    [(set (match_dup 2)
684	  (compare:CC_V
685	   (plus:DI (match_dup 3)
686		    (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
687	   (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
688     (set (match_operand:SI 0 "s_register_operand")
689	  (plus:SI (match_dup 4) (match_dup 1)))])]
690  "TARGET_32BIT"
691  {
692    operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
693    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
694    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
695    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
696  }
697)
698
699(define_insn "*addsi3_cin_vout_0_insn"
700  [(set (reg:CC_V CC_REGNUM)
701	(compare:CC_V
702	 (plus:DI
703	  (match_operand:DI 2 "arm_carry_operation" "")
704	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
705	 (sign_extend:DI (plus:SI
706			  (match_operand:SI 3 "arm_carry_operation" "")
707			  (match_dup 1)))))
708   (set (match_operand:SI 0 "s_register_operand" "=r")
709	(plus:SI (match_dup 3) (match_dup 1)))]
710  "TARGET_32BIT"
711  "adcs%?\\t%0, %1, #0"
712  [(set_attr "type" "alus_imm")]
713)
714
715(define_expand "uaddvsi4"
716  [(match_operand:SI 0 "s_register_operand")
717   (match_operand:SI 1 "s_register_operand")
718   (match_operand:SI 2 "arm_add_operand")
719   (match_operand 3 "")]
720  "TARGET_32BIT"
721{
722  emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
723  arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
724
725  DONE;
726})
727
728(define_expand "uaddvdi4"
729  [(match_operand:DI 0 "s_register_operand")
730   (match_operand:DI 1 "s_register_operand")
731   (match_operand:DI 2 "reg_or_int_operand")
732   (match_operand 3 "")]
733  "TARGET_32BIT"
734{
735  rtx lo_result, hi_result;
736  rtx lo_op1, hi_op1, lo_op2, hi_op2;
737  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
738			  &lo_op2, &hi_op2);
739  lo_result = gen_lowpart (SImode, operands[0]);
740  hi_result = gen_highpart (SImode, operands[0]);
741
742  if (lo_op2 == const0_rtx)
743    {
744      emit_move_insn (lo_result, lo_op1);
745      if (!arm_add_operand (hi_op2, SImode))
746	hi_op2 = force_reg (SImode, hi_op2);
747
748      emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
749    }
750  else
751    {
752      if (!arm_add_operand (lo_op2, SImode))
753	lo_op2 = force_reg (SImode, lo_op2);
754      if (!arm_not_operand (hi_op2, SImode))
755	hi_op2 = force_reg (SImode, hi_op2);
756
757      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
758
759      if (hi_op2 == const0_rtx)
760        emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
761      else if (CONST_INT_P (hi_op2))
762        emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
763      else
764        emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
765
766      arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
767    }
768
769  DONE;
770})
771
772(define_expand "addsi3_cin_cout_reg"
773  [(parallel
774    [(set (match_dup 3)
775	  (compare:CC_ADC
776	   (plus:DI
777	    (plus:DI (match_dup 4)
778		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
779	    (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
780	   (const_int 4294967296)))
781     (set (match_operand:SI 0 "s_register_operand")
782	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
783		   (match_dup 2)))])]
784  "TARGET_32BIT"
785  {
786    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
787    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
788    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
789    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
790  }
791)
792
793(define_insn "*addsi3_cin_cout_reg_insn"
794  [(set (reg:CC_ADC CC_REGNUM)
795	(compare:CC_ADC
796	 (plus:DI
797	  (plus:DI
798	   (match_operand:DI 3 "arm_carry_operation" "")
799	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
800	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
801	(const_int 4294967296)))
802   (set (match_operand:SI 0 "s_register_operand" "=l,r")
803	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
804			  (match_dup 1))
805		 (match_dup 2)))]
806  "TARGET_32BIT"
807  "@
808   adcs%?\\t%0, %0, %2
809   adcs%?\\t%0, %1, %2"
810  [(set_attr "type" "alus_sreg")
811   (set_attr "arch" "t2,*")
812   (set_attr "length" "2,4")]
813)
814
815(define_expand "addsi3_cin_cout_imm"
816  [(parallel
817    [(set (match_dup 3)
818	  (compare:CC_ADC
819	   (plus:DI
820	    (plus:DI (match_dup 4)
821		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
822	    (match_dup 6))
823	   (const_int 4294967296)))
824     (set (match_operand:SI 0 "s_register_operand")
825	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
826		   (match_operand:SI 2 "arm_adcimm_operand")))])]
827  "TARGET_32BIT"
828  {
829    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
833    operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
834  }
835)
836
837(define_insn "*addsi3_cin_cout_imm_insn"
838  [(set (reg:CC_ADC CC_REGNUM)
839	(compare:CC_ADC
840	 (plus:DI
841	  (plus:DI
842	   (match_operand:DI 3 "arm_carry_operation" "")
843	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
844	  (match_operand:DI 5 "const_int_operand" "n,n"))
845	(const_int 4294967296)))
846   (set (match_operand:SI 0 "s_register_operand" "=r,r")
847	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
848			  (match_dup 1))
849		 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
850  "TARGET_32BIT
851   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
852  "@
853   adcs%?\\t%0, %1, %2
854   sbcs%?\\t%0, %1, #%B2"
855  [(set_attr "type" "alus_imm")]
856)
857
858(define_expand "addsi3_cin_cout_0"
859  [(parallel
860    [(set (match_dup 2)
861	  (compare:CC_ADC
862	   (plus:DI (match_dup 3)
863		    (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
864	   (const_int 4294967296)))
865     (set (match_operand:SI 0 "s_register_operand")
866	  (plus:SI (match_dup 4) (match_dup 1)))])]
867  "TARGET_32BIT"
868  {
869    operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
870    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
871    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
872    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
873  }
874)
875
876(define_insn "*addsi3_cin_cout_0_insn"
877  [(set (reg:CC_ADC CC_REGNUM)
878	(compare:CC_ADC
879	 (plus:DI
880	  (match_operand:DI 2 "arm_carry_operation" "")
881	  (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
882	(const_int 4294967296)))
883   (set (match_operand:SI 0 "s_register_operand" "=r")
884	(plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
885  "TARGET_32BIT"
886  "adcs%?\\t%0, %1, #0"
887  [(set_attr "type" "alus_imm")]
888)
889
890(define_expand "addsi3"
891  [(set (match_operand:SI          0 "s_register_operand")
892	(plus:SI (match_operand:SI 1 "s_register_operand")
893		 (match_operand:SI 2 "reg_or_int_operand")))]
894  "TARGET_EITHER"
895  "
896  if (TARGET_32BIT && CONST_INT_P (operands[2]))
897    {
898      arm_split_constant (PLUS, SImode, NULL_RTX,
899	                  INTVAL (operands[2]), operands[0], operands[1],
900			  optimize && can_create_pseudo_p ());
901      DONE;
902    }
903  "
904)
905
906; If there is a scratch available, this will be faster than synthesizing the
907; addition.
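; For example (illustrative), when adding a constant such as 0xfffff00f,
; whose bitwise complement 0xff0 is a valid ARM immediate while neither the
; constant nor its negation is, the peephole below loads the constant into
; the scratch with a single MVN and finishes with one register-register ADD;
; the MVN does not depend on the addend, so the dependent chain is shorter
; than a sequence of back-to-back ADD/SUB immediates.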
908(define_peephole2
909  [(match_scratch:SI 3 "r")
910   (set (match_operand:SI          0 "arm_general_register_operand" "")
911	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
912		 (match_operand:SI 2 "const_int_operand"  "")))]
913  "TARGET_32BIT &&
914   !(const_ok_for_arm (INTVAL (operands[2]))
915     || const_ok_for_arm (-INTVAL (operands[2])))
916    && const_ok_for_arm (~INTVAL (operands[2]))"
917  [(set (match_dup 3) (match_dup 2))
918   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
919  ""
920)
921
922;; The r/r/k alternative is required when reloading the address
923;;  (plus (reg rN) (reg sp)) into (reg rN).  In this case reload will
924;; put the duplicated register first, and not try the commutative version.
925(define_insn_and_split "*arm_addsi3"
926  [(set (match_operand:SI          0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
927	(plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
928		 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
929  "TARGET_32BIT"
930  "@
931   add%?\\t%0, %0, %2
932   add%?\\t%0, %1, %2
933   add%?\\t%0, %1, %2
934   add%?\\t%0, %1, %2
935   add%?\\t%0, %1, %2
936   add%?\\t%0, %1, %2
937   add%?\\t%0, %2, %1
938   add%?\\t%0, %1, %2
939   addw%?\\t%0, %1, %2
940   addw%?\\t%0, %1, %2
941   sub%?\\t%0, %1, #%n2
942   sub%?\\t%0, %1, #%n2
943   sub%?\\t%0, %1, #%n2
944   subw%?\\t%0, %1, #%n2
945   subw%?\\t%0, %1, #%n2
946   #"
947  "TARGET_32BIT
948   && CONST_INT_P (operands[2])
949   && !const_ok_for_op (INTVAL (operands[2]), PLUS)
950   && (reload_completed || !arm_eliminable_register (operands[1]))"
951  [(clobber (const_int 0))]
952  "
953  arm_split_constant (PLUS, SImode, curr_insn,
954	              INTVAL (operands[2]), operands[0],
955		      operands[1], 0);
956  DONE;
957  "
958  [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
959   (set_attr "predicable" "yes")
960   (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
961   (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
962   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
963		      (const_string "alu_imm")
964		      (const_string "alu_sreg")))
965 ]
966)
967
968(define_insn "addsi3_compareV_reg"
969  [(set (reg:CC_V CC_REGNUM)
970	(compare:CC_V
971	  (plus:DI
972	    (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
973	    (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
974	  (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
975   (set (match_operand:SI 0 "register_operand" "=l,r,r")
976	(plus:SI (match_dup 1) (match_dup 2)))]
977  "TARGET_32BIT"
978  "adds%?\\t%0, %1, %2"
979  [(set_attr "conds" "set")
980   (set_attr "arch" "t2,t2,*")
981   (set_attr "length" "2,2,4")
982   (set_attr "type" "alus_sreg")]
983)
984
985(define_insn "*addsi3_compareV_reg_nosum"
986  [(set (reg:CC_V CC_REGNUM)
987	(compare:CC_V
988	  (plus:DI
989	    (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
990	    (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
991	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
992  "TARGET_32BIT"
993  "cmn%?\\t%0, %1"
994  [(set_attr "conds" "set")
995   (set_attr "arch" "t2,*")
996   (set_attr "length" "2,4")
997   (set_attr "type" "alus_sreg")]
998)
999
1000(define_insn "subvsi3_intmin"
1001  [(set (reg:CC_V CC_REGNUM)
1002	(compare:CC_V
1003	  (plus:DI
1004	    (sign_extend:DI
1005	     (match_operand:SI 1 "register_operand" "r"))
1006	    (const_int 2147483648))
1007	  (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1008   (set (match_operand:SI 0 "register_operand" "=r")
1009	(plus:SI (match_dup 1) (const_int -2147483648)))]
1010  "TARGET_32BIT"
1011  "subs%?\\t%0, %1, #-2147483648"
1012  [(set_attr "conds" "set")
1013   (set_attr "type" "alus_imm")]
1014)
1015
1016(define_insn "addsi3_compareV_imm"
1017  [(set (reg:CC_V CC_REGNUM)
1018	(compare:CC_V
1019	  (plus:DI
1020	    (sign_extend:DI
1021	     (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1022	    (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1023	  (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1024   (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1025	(plus:SI (match_dup 1) (match_dup 2)))]
1026  "TARGET_32BIT
1027   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1028  "@
1029   adds%?\\t%0, %1, %2
1030   adds%?\\t%0, %0, %2
1031   subs%?\\t%0, %1, #%n2
1032   subs%?\\t%0, %0, #%n2
1033   adds%?\\t%0, %1, %2
1034   subs%?\\t%0, %1, #%n2"
1035  [(set_attr "conds" "set")
1036   (set_attr "arch" "t2,t2,t2,t2,*,*")
1037   (set_attr "length" "2,2,2,2,4,4")
1038   (set_attr "type" "alus_imm")]
1039)
1040
1041(define_insn "addsi3_compareV_imm_nosum"
1042  [(set (reg:CC_V CC_REGNUM)
1043	(compare:CC_V
1044	  (plus:DI
1045	    (sign_extend:DI
1046	     (match_operand:SI 0 "register_operand" "l,r,r"))
1047	    (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1048	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1049  "TARGET_32BIT
1050   && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1051  "@
1052   cmp%?\\t%0, #%n1
1053   cmn%?\\t%0, %1
1054   cmp%?\\t%0, #%n1"
1055  [(set_attr "conds" "set")
1056   (set_attr "arch" "t2,*,*")
1057   (set_attr "length" "2,4,4")
1058   (set_attr "type" "alus_imm")]
1059)
1060
;; We can handle more constants efficiently if we can clobber either a scratch
1062;; or the other source operand.  We deliberately leave this late as in
1063;; high register pressure situations it's not worth forcing any reloads.
1064(define_peephole2
1065  [(match_scratch:SI 2 "l")
1066   (set (reg:CC_V CC_REGNUM)
1067	(compare:CC_V
1068	  (plus:DI
1069	    (sign_extend:DI
1070	     (match_operand:SI 0 "low_register_operand"))
1071	    (match_operand 1 "const_int_operand"))
1072	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1073  "TARGET_THUMB2
1074   && satisfies_constraint_Pd (operands[1])"
1075  [(parallel[
1076    (set (reg:CC_V CC_REGNUM)
1077	 (compare:CC_V
1078	  (plus:DI (sign_extend:DI (match_dup 0))
1079		   (sign_extend:DI (match_dup 1)))
1080	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1081    (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1082)
1083
1084(define_peephole2
1085  [(set (reg:CC_V CC_REGNUM)
1086	(compare:CC_V
1087	  (plus:DI
1088	    (sign_extend:DI
1089	     (match_operand:SI 0 "low_register_operand"))
1090	    (match_operand 1 "const_int_operand"))
1091	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1092  "TARGET_THUMB2
1093   && dead_or_set_p (peep2_next_insn (0), operands[0])
1094   && satisfies_constraint_Py (operands[1])"
1095  [(parallel[
1096    (set (reg:CC_V CC_REGNUM)
1097	 (compare:CC_V
1098	  (plus:DI (sign_extend:DI (match_dup 0))
1099		   (sign_extend:DI (match_dup 1)))
1100	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1101    (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1102)
1103
1104(define_insn "addsi3_compare0"
1105  [(set (reg:CC_NZ CC_REGNUM)
1106	(compare:CC_NZ
1107	 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1108		  (match_operand:SI 2 "arm_add_operand"    "I,L,r"))
1109	 (const_int 0)))
1110   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1111	(plus:SI (match_dup 1) (match_dup 2)))]
1112  "TARGET_ARM"
1113  "@
1114   adds%?\\t%0, %1, %2
1115   subs%?\\t%0, %1, #%n2
1116   adds%?\\t%0, %1, %2"
1117  [(set_attr "conds" "set")
1118   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1119)
1120
1121(define_insn "*addsi3_compare0_scratch"
1122  [(set (reg:CC_NZ CC_REGNUM)
1123	(compare:CC_NZ
1124	 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1125		  (match_operand:SI 1 "arm_add_operand"    "I,L, r"))
1126	 (const_int 0)))]
1127  "TARGET_ARM"
1128  "@
1129   cmn%?\\t%0, %1
1130   cmp%?\\t%0, #%n1
1131   cmn%?\\t%0, %1"
1132  [(set_attr "conds" "set")
1133   (set_attr "predicable" "yes")
1134   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1135)
1136
1137(define_insn "*compare_negsi_si"
1138  [(set (reg:CC_Z CC_REGNUM)
1139	(compare:CC_Z
1140	 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1141	 (match_operand:SI 1 "s_register_operand" "l,r")))]
1142  "TARGET_32BIT"
1143  "cmn%?\\t%1, %0"
1144  [(set_attr "conds" "set")
1145   (set_attr "predicable" "yes")
1146   (set_attr "arch" "t2,*")
1147   (set_attr "length" "2,4")
1148   (set_attr "predicable_short_it" "yes,no")
1149   (set_attr "type" "alus_sreg")]
1150)
1151
1152;; This is the canonicalization of subsi3_compare when the
1153;; addend is a constant.
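;; For example (illustrative), comparing r1 with 5 while also computing
;; r0 = r1 - 5 matches this pattern with operands[2] == 5 and
;; operands[3] == -5, and both operations are performed by the single
;;   subs  r0, r1, #5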
1154(define_insn "cmpsi2_addneg"
1155  [(set (reg:CC CC_REGNUM)
1156	(compare:CC
1157	 (match_operand:SI 1 "s_register_operand" "r,r")
1158	 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1159   (set (match_operand:SI 0 "s_register_operand" "=r,r")
1160	(plus:SI (match_dup 1)
1161		 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1162  "TARGET_32BIT
1163   && (INTVAL (operands[2])
1164       == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1165{
  /* For 0 and INT_MIN it is essential that we use subs, as adds will result
     in different condition codes (like cmn rather than like cmp), so that
     alternative comes first.  Both alternatives can match for any 0x??000000
     value; except for 0 and INT_MIN it doesn't matter which we choose, and
     likewise for -1 and 1 with TARGET_THUMB2, where the #1 form is preferred
     as it is shorter.  */
1172  if (which_alternative == 0 && operands[3] != const1_rtx)
1173    return "subs%?\\t%0, %1, #%n3";
1174  else
1175    return "adds%?\\t%0, %1, %3";
1176}
1177  [(set_attr "conds" "set")
1178   (set_attr "type" "alus_sreg")]
1179)
1180
1181;; Convert the sequence
1182;;  sub  rd, rn, #1
1183;;  cmn  rd, #1	(equivalent to cmp rd, #-1)
1184;;  bne  dest
1185;; into
1186;;  subs rd, rn, #1
1187;;  bcs  dest	((unsigned)rn >= 1)
1188;; similarly for the beq variant using bcc.
1189;; This is a common looping idiom (while (n--))
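;; For instance (illustrative), a count-down loop written in C as
;;   while (n--)
;;     { /* loop body */ }
;; generates exactly this decrement-and-test sequence, so folding the cmn
;; into the subs saves an instruction on every iteration.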
1190(define_peephole2
1191  [(set (match_operand:SI 0 "arm_general_register_operand" "")
1192	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1193		 (const_int -1)))
1194   (set (match_operand 2 "cc_register" "")
1195	(compare (match_dup 0) (const_int -1)))
1196   (set (pc)
1197	(if_then_else (match_operator 3 "equality_operator"
1198		       [(match_dup 2) (const_int 0)])
1199		      (match_operand 4 "" "")
1200		      (match_operand 5 "" "")))]
1201  "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1202  [(parallel[
1203    (set (match_dup 2)
1204	 (compare:CC
1205	  (match_dup 1) (const_int 1)))
1206    (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1207   (set (pc)
1208	(if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1209		      (match_dup 4)
1210		      (match_dup 5)))]
1211  "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1212   operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1213				  ? GEU : LTU),
1214				 VOIDmode,
1215				 operands[2], const0_rtx);"
1216)
1217
1218;; The next four insns work because they compare the result with one of
1219;; the operands, and we know that the use of the condition code is
1220;; either GEU or LTU, so we can use the carry flag from the addition
1221;; instead of doing the compare a second time.
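;; For example (illustrative), an unsigned overflow check such as
;;   if (a + b < a)	/* unsigned: true iff the addition carried out */
;; can be compiled to
;;   adds  r0, r1, r2
;;   bcs   .Loverflow	@ hypothetical label
;; reusing the carry set by the addition instead of re-comparing the sum
;; with one of the operands.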
1222(define_insn "addsi3_compare_op1"
1223  [(set (reg:CC_C CC_REGNUM)
1224	(compare:CC_C
1225	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1226		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1227	 (match_dup 1)))
1228   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1229	(plus:SI (match_dup 1) (match_dup 2)))]
1230  "TARGET_32BIT"
1231  "@
1232   adds%?\\t%0, %1, %2
1233   adds%?\\t%0, %0, %2
1234   subs%?\\t%0, %1, #%n2
1235   subs%?\\t%0, %0, #%n2
1236   adds%?\\t%0, %1, %2
1237   subs%?\\t%0, %1, #%n2"
1238  [(set_attr "conds" "set")
1239   (set_attr "arch" "t2,t2,t2,t2,*,*")
1240   (set_attr "length" "2,2,2,2,4,4")
1241   (set (attr "type")
1242	(if_then_else (match_operand 2 "const_int_operand")
1243		      (const_string "alu_imm")
1244		      (const_string "alu_sreg")))]
1245)
1246
1247(define_insn "*addsi3_compare_op2"
1248  [(set (reg:CC_C CC_REGNUM)
1249	(compare:CC_C
1250	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1251		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1252	 (match_dup 2)))
1253   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1254	(plus:SI (match_dup 1) (match_dup 2)))]
1255  "TARGET_32BIT"
1256  "@
1257   adds%?\\t%0, %1, %2
1258   adds%?\\t%0, %0, %2
1259   subs%?\\t%0, %1, #%n2
1260   subs%?\\t%0, %0, #%n2
1261   adds%?\\t%0, %1, %2
1262   subs%?\\t%0, %1, #%n2"
1263  [(set_attr "conds" "set")
1264   (set_attr "arch" "t2,t2,t2,t2,*,*")
1265   (set_attr "length" "2,2,2,2,4,4")
1266   (set (attr "type")
1267	(if_then_else (match_operand 2 "const_int_operand")
1268		      (const_string "alu_imm")
1269		      (const_string "alu_sreg")))]
1270)
1271
1272(define_insn "*compare_addsi2_op0"
1273  [(set (reg:CC_C CC_REGNUM)
1274        (compare:CC_C
1275          (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1276                   (match_operand:SI 1 "arm_add_operand"    "l,Pw,rI,L"))
1277          (match_dup 0)))]
1278  "TARGET_32BIT"
1279  "@
1280   cmn%?\\t%0, %1
1281   cmp%?\\t%0, #%n1
1282   cmn%?\\t%0, %1
1283   cmp%?\\t%0, #%n1"
1284  [(set_attr "conds" "set")
1285   (set_attr "predicable" "yes")
1286   (set_attr "arch" "t2,t2,*,*")
1287   (set_attr "predicable_short_it" "yes,yes,no,no")
1288   (set_attr "length" "2,2,4,4")
1289   (set (attr "type")
1290	(if_then_else (match_operand 1 "const_int_operand")
1291		      (const_string "alu_imm")
1292		      (const_string "alu_sreg")))]
1293)
1294
1295(define_insn "*compare_addsi2_op1"
1296  [(set (reg:CC_C CC_REGNUM)
1297        (compare:CC_C
1298          (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1299                   (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1300          (match_dup 1)))]
1301  "TARGET_32BIT"
1302  "@
1303   cmn%?\\t%0, %1
1304   cmp%?\\t%0, #%n1
1305   cmn%?\\t%0, %1
1306   cmp%?\\t%0, #%n1"
1307  [(set_attr "conds" "set")
1308   (set_attr "predicable" "yes")
1309   (set_attr "arch" "t2,t2,*,*")
1310   (set_attr "predicable_short_it" "yes,yes,no,no")
1311   (set_attr "length" "2,2,4,4")
1312   (set (attr "type")
1313	(if_then_else (match_operand 1 "const_int_operand")
1314		      (const_string "alu_imm")
1315		      (const_string "alu_sreg")))]
1316 )
1317
1318(define_insn "addsi3_carryin"
1319  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1320        (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1321                          (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1322                 (match_operand:SI 3 "arm_carry_operation" "")))]
1323  "TARGET_32BIT"
1324  "@
1325   adc%?\\t%0, %1, %2
1326   adc%?\\t%0, %1, %2
1327   sbc%?\\t%0, %1, #%B2"
1328  [(set_attr "conds" "use")
1329   (set_attr "predicable" "yes")
1330   (set_attr "arch" "t2,*,*")
1331   (set_attr "length" "4")
1332   (set_attr "predicable_short_it" "yes,no,no")
1333   (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1334)
1335
1336;; Canonicalization of the above when the immediate is zero.
1337(define_insn "add0si3_carryin"
1338  [(set (match_operand:SI 0 "s_register_operand" "=r")
1339	(plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1340		 (match_operand:SI 1 "arm_not_operand" "r")))]
1341  "TARGET_32BIT"
1342  "adc%?\\t%0, %1, #0"
1343  [(set_attr "conds" "use")
1344   (set_attr "predicable" "yes")
1345   (set_attr "length" "4")
1346   (set_attr "type" "adc_imm")]
1347)
1348
1349(define_insn "*addsi3_carryin_alt2"
1350  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1351        (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1352                          (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1353                 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1354  "TARGET_32BIT"
1355  "@
1356   adc%?\\t%0, %1, %2
1357   adc%?\\t%0, %1, %2
1358   sbc%?\\t%0, %1, #%B2"
1359  [(set_attr "conds" "use")
1360   (set_attr "predicable" "yes")
1361   (set_attr "arch" "t2,*,*")
1362   (set_attr "length" "4")
1363   (set_attr "predicable_short_it" "yes,no,no")
1364   (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1365)
1366
1367(define_insn "*addsi3_carryin_shift"
1368  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1369	(plus:SI (plus:SI
1370		  (match_operator:SI 2 "shift_operator"
1371		    [(match_operand:SI 3 "s_register_operand" "r,r")
1372		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
1373		  (match_operand:SI 5 "arm_carry_operation" ""))
1374		 (match_operand:SI 1 "s_register_operand" "r,r")))]
1375  "TARGET_32BIT"
1376  "adc%?\\t%0, %1, %3%S2"
1377  [(set_attr "conds" "use")
1378   (set_attr "arch" "32,a")
1379   (set_attr "shift" "3")
1380   (set_attr "predicable" "yes")
1381   (set_attr "autodetect_type" "alu_shift_operator2")]
1382)
1383
1384(define_insn "*addsi3_carryin_clobercc"
1385  [(set (match_operand:SI 0 "s_register_operand" "=r")
1386	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1387			  (match_operand:SI 2 "arm_rhs_operand" "rI"))
1388		 (match_operand:SI 3 "arm_carry_operation" "")))
1389   (clobber (reg:CC CC_REGNUM))]
1390   "TARGET_32BIT"
1391   "adcs%?\\t%0, %1, %2"
1392   [(set_attr "conds" "set")
1393    (set_attr "type" "adcs_reg")]
1394)
1395
1396(define_expand "subvsi4"
1397  [(match_operand:SI 0 "s_register_operand")
1398   (match_operand:SI 1 "arm_rhs_operand")
1399   (match_operand:SI 2 "arm_add_operand")
1400   (match_operand 3 "")]
1401  "TARGET_32BIT"
1402{
1403  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1404    {
1405      /* If both operands are constants we can decide the result statically.  */
1406      wi::overflow_type overflow;
1407      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1408			      rtx_mode_t (operands[2], SImode),
1409			      SIGNED, &overflow);
1410      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1411      if (overflow != wi::OVF_NONE)
1412	emit_jump_insn (gen_jump (operands[3]));
1413      DONE;
1414    }
1415  else if (CONST_INT_P (operands[2]))
1416    {
1417      operands[2] = GEN_INT (-INTVAL (operands[2]));
1418      /* Special case for INT_MIN.  */
1419      if (INTVAL (operands[2]) == 0x80000000)
1420	emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1421      else
1422	emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1423					  operands[2]));
1424    }
1425  else if (CONST_INT_P (operands[1]))
1426    emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1427  else
1428    emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1429
1430  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1431  DONE;
1432})
1433
1434(define_expand "subvdi4"
1435  [(match_operand:DI 0 "s_register_operand")
1436   (match_operand:DI 1 "reg_or_int_operand")
1437   (match_operand:DI 2 "reg_or_int_operand")
1438   (match_operand 3 "")]
1439  "TARGET_32BIT"
1440{
1441  rtx lo_result, hi_result;
1442  rtx lo_op1, hi_op1, lo_op2, hi_op2;
1443  lo_result = gen_lowpart (SImode, operands[0]);
1444  hi_result = gen_highpart (SImode, operands[0]);
1445  machine_mode mode = CCmode;
1446
1447  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1448    {
1449      /* If both operands are constants we can decide the result statically.  */
1450      wi::overflow_type overflow;
1451      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1452			      rtx_mode_t (operands[2], DImode),
1453			      SIGNED, &overflow);
1454      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1455      if (overflow != wi::OVF_NONE)
1456	emit_jump_insn (gen_jump (operands[3]));
1457      DONE;
1458    }
1459  else if (CONST_INT_P (operands[1]))
1460    {
1461      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1462			      &lo_op1, &hi_op1);
1463      if (const_ok_for_arm (INTVAL (lo_op1)))
1464	{
1465	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1466					  GEN_INT (~UINTVAL (lo_op1))));
1467	  /* We could potentially use RSC here in Arm state, but not
1468	     in Thumb, so it's probably not worth the effort of handling
1469	     this.  */
1470	  hi_op1 = force_reg (SImode, hi_op1);
1471	  mode = CC_RSBmode;
1472	  goto highpart;
1473	}
1474      operands[1] = force_reg (DImode, operands[1]);
1475    }
1476
1477  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1478			  &lo_op2, &hi_op2);
1479  if (lo_op2 == const0_rtx)
1480    {
1481      emit_move_insn (lo_result, lo_op1);
1482      if (!arm_add_operand (hi_op2, SImode))
1483        hi_op2 = force_reg (SImode, hi_op2);
1484      emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1485      DONE;
1486    }
1487
1488  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1489    lo_op2 = force_reg (SImode, lo_op2);
1490  if (CONST_INT_P (lo_op2))
1491    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1492				  gen_int_mode (-INTVAL (lo_op2), SImode)));
1493  else
1494    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1495
1496 highpart:
1497  if (!arm_not_operand (hi_op2, SImode))
1498    hi_op2 = force_reg (SImode, hi_op2);
1499  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1500  if (CONST_INT_P (hi_op2))
1501    emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1502				       gen_rtx_LTU (SImode, ccreg, const0_rtx),
1503				       gen_rtx_LTU (DImode, ccreg,
1504						    const0_rtx)));
1505  else
1506    emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1507				   gen_rtx_LTU (SImode, ccreg, const0_rtx),
1508				   gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1509  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1510
1511  DONE;
1512})
1513
1514(define_expand "usubvsi4"
1515  [(match_operand:SI 0 "s_register_operand")
1516   (match_operand:SI 1 "arm_rhs_operand")
1517   (match_operand:SI 2 "arm_add_operand")
1518   (match_operand 3 "")]
1519  "TARGET_32BIT"
1520{
1521  machine_mode mode = CCmode;
1522  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1523    {
1524      /* If both operands are constants we can decide the result statically.  */
1525      wi::overflow_type overflow;
1526      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1527			      rtx_mode_t (operands[2], SImode),
1528			      UNSIGNED, &overflow);
1529      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1530      if (overflow != wi::OVF_NONE)
1531	emit_jump_insn (gen_jump (operands[3]));
1532      DONE;
1533    }
1534  else if (CONST_INT_P (operands[2]))
1535    emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1536				  gen_int_mode (-INTVAL (operands[2]),
1537						SImode)));
1538  else if (CONST_INT_P (operands[1]))
1539    {
1540      mode = CC_RSBmode;
1541      emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1542				      GEN_INT (~UINTVAL (operands[1]))));
1543    }
1544  else
1545    emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1546  arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1547
1548  DONE;
1549})
1550
1551(define_expand "usubvdi4"
1552  [(match_operand:DI 0 "s_register_operand")
1553   (match_operand:DI 1 "reg_or_int_operand")
1554   (match_operand:DI 2 "reg_or_int_operand")
1555   (match_operand 3 "")]
1556  "TARGET_32BIT"
1557{
1558  rtx lo_result, hi_result;
1559  rtx lo_op1, hi_op1, lo_op2, hi_op2;
1560  lo_result = gen_lowpart (SImode, operands[0]);
1561  hi_result = gen_highpart (SImode, operands[0]);
1562  machine_mode mode = CCmode;
1563
1564  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1565    {
1566      /* If both operands are constants we can decide the result statically.  */
1567      wi::overflow_type overflow;
1568      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1569			      rtx_mode_t (operands[2], DImode),
1570			      UNSIGNED, &overflow);
1571      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1572      if (overflow != wi::OVF_NONE)
1573	emit_jump_insn (gen_jump (operands[3]));
1574      DONE;
1575    }
1576  else if (CONST_INT_P (operands[1]))
1577    {
1578      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1579			      &lo_op1, &hi_op1);
1580      if (const_ok_for_arm (INTVAL (lo_op1)))
1581	{
1582	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1583					  GEN_INT (~UINTVAL (lo_op1))));
1584	  /* We could potentially use RSC here in Arm state, but not
1585	     in Thumb, so it's probably not worth the effort of handling
1586	     this.  */
1587	  hi_op1 = force_reg (SImode, hi_op1);
1588	  mode = CC_RSBmode;
1589	  goto highpart;
1590	}
1591      operands[1] = force_reg (DImode, operands[1]);
1592    }
1593
1594  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1595			  &lo_op2, &hi_op2);
1596  if (lo_op2 == const0_rtx)
1597    {
1598      emit_move_insn (lo_result, lo_op1);
1599      if (!arm_add_operand (hi_op2, SImode))
1600        hi_op2 = force_reg (SImode, hi_op2);
1601      emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1602      DONE;
1603    }
1604
1605  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1606    lo_op2 = force_reg (SImode, lo_op2);
1607  if (CONST_INT_P (lo_op2))
1608    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1609				  gen_int_mode (-INTVAL (lo_op2), SImode)));
1610  else
1611    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1612
1613 highpart:
1614  if (!arm_not_operand (hi_op2, SImode))
1615    hi_op2 = force_reg (SImode, hi_op2);
1616  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1617  if (CONST_INT_P (hi_op2))
1618    emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1619					GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1620					gen_rtx_LTU (SImode, ccreg, const0_rtx),
1621					gen_rtx_LTU (DImode, ccreg,
1622						     const0_rtx)));
1623  else
1624    emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1625				    gen_rtx_LTU (SImode, ccreg, const0_rtx),
1626				    gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1627  arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1628
1629  DONE;
1630})
1631
1632(define_insn "subsi3_compare1"
1633  [(set (reg:CC CC_REGNUM)
1634	(compare:CC
1635	  (match_operand:SI 1 "register_operand" "r")
1636	  (match_operand:SI 2 "register_operand" "r")))
1637   (set (match_operand:SI 0 "register_operand" "=r")
1638	(minus:SI (match_dup 1) (match_dup 2)))]
1639  "TARGET_32BIT"
1640  "subs%?\\t%0, %1, %2"
1641  [(set_attr "conds" "set")
1642   (set_attr "type" "alus_sreg")]
1643)
1644
1645(define_insn "subvsi3"
1646  [(set (reg:CC_V CC_REGNUM)
1647	(compare:CC_V
1648	 (minus:DI
1649	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1650	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1651	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1652   (set (match_operand:SI 0 "s_register_operand" "=l,r")
1653	(minus:SI (match_dup 1) (match_dup 2)))]
1654  "TARGET_32BIT"
1655  "subs%?\\t%0, %1, %2"
1656  [(set_attr "conds" "set")
1657   (set_attr "arch" "t2,*")
1658   (set_attr "length" "2,4")
1659   (set_attr "type" "alus_sreg")]
1660)
1661
1662(define_insn "subvsi3_imm1"
1663  [(set (reg:CC_V CC_REGNUM)
1664	(compare:CC_V
1665	 (minus:DI
1666	  (match_operand 1 "arm_immediate_operand" "I")
1667	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1668	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1669   (set (match_operand:SI 0 "s_register_operand" "=r")
1670	(minus:SI (match_dup 1) (match_dup 2)))]
1671  "TARGET_32BIT"
1672  "rsbs%?\\t%0, %2, %1"
1673  [(set_attr "conds" "set")
1674   (set_attr "type" "alus_imm")]
1675)
1676
1677(define_insn "subsi3_carryin"
1678  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1679	(minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1680			    (match_operand:SI 2 "s_register_operand" "r,r,r"))
1681		  (match_operand:SI 3 "arm_borrow_operation" "")))]
1682  "TARGET_32BIT"
1683  "@
1684   sbc%?\\t%0, %1, %2
1685   rsc%?\\t%0, %2, %1
1686   sbc%?\\t%0, %2, %2, lsl #1"
1687  [(set_attr "conds" "use")
1688   (set_attr "arch" "*,a,t2")
1689   (set_attr "predicable" "yes")
1690   (set_attr "type" "adc_reg,adc_imm,alu_shift_imm_lsl_1to4")]
1691)
1692
1693;; Special canonicalization of the above when operand1 == (const_int 1):
1694;; in this case the 'borrow' needs to be treated like subtracting from the carry.
1695(define_insn "rsbsi_carryin_reg"
1696  [(set (match_operand:SI 0 "s_register_operand" "=r")
1697	(minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1698		  (match_operand:SI 2 "s_register_operand" "r")))]
1699  "TARGET_ARM"
1700  "rsc%?\\t%0, %2, #1"
1701  [(set_attr "conds" "use")
1702   (set_attr "predicable" "yes")
1703   (set_attr "type" "adc_imm")]
1704)
1705
1706;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1707;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
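;; As a quick worked example of that identity (purely illustrative): with no
;; borrow pending (C = 1), Rn = 5 and Rm = 3, SBC gives 5 - 3 - 0 = 2, while
;; ADC Rd, Rn, ~Rm gives 5 + 0xfffffffc + 1 = 0x100000002, i.e. 2 modulo 2^32.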
1708(define_insn "*add_not_cin"
1709  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1710	(plus:SI
1711	 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1712		  (match_operand:SI 3 "arm_carry_operation" ""))
1713	 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1714  "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1715  "@
1716   sbc%?\\t%0, %2, %1
1717   rsc%?\\t%0, %1, %2"
1718  [(set_attr "conds" "use")
1719   (set_attr "predicable" "yes")
1720   (set_attr "arch" "*,a")
1721   (set_attr "type" "adc_reg,adc_imm")]
1722)
1723
1724;; On Arm we can also use the same trick when the non-inverted operand is
1725;; shifted, using RSC.
1726(define_insn "add_not_shift_cin"
1727  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1728	(plus:SI
1729	 (plus:SI (match_operator:SI 3 "shift_operator"
1730		   [(match_operand:SI 1 "s_register_operand" "r,r")
1731		    (match_operand:SI 2 "shift_amount_operand" "M,r")])
1732		  (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1733	 (match_operand:SI 5 "arm_carry_operation" "")))]
1734  "TARGET_ARM"
1735  "rsc%?\\t%0, %4, %1%S3"
1736  [(set_attr "conds" "use")
1737   (set_attr "predicable" "yes")
1738   (set_attr "autodetect_type" "alu_shift_operator3")]
1739)
1740
1741(define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1742  [(set (reg:<CC_EXTEND> CC_REGNUM)
1743	(compare:<CC_EXTEND>
1744	 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1745	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1746		  (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1747   (clobber (match_scratch:SI 0 "=l,r"))]
1748  "TARGET_32BIT"
1749  "sbcs\\t%0, %1, %2"
1750  [(set_attr "conds" "set")
1751   (set_attr "arch" "t2,*")
1752   (set_attr "length" "2,4")
1753   (set_attr "type" "adc_reg")]
1754)
1755
1756;; Similar to the above, but handling a constant which has a different
1757;; canonicalization.
1758(define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1759  [(set (reg:<CC_EXTEND> CC_REGNUM)
1760	(compare:<CC_EXTEND>
1761	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1762	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1763		  (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1764   (clobber (match_scratch:SI 0 "=l,r"))]
1765  "TARGET_32BIT"
1766  "@
1767   sbcs\\t%0, %1, %2
1768   adcs\\t%0, %1, #%B2"
1769  [(set_attr "conds" "set")
1770   (set_attr "type" "adc_imm")]
1771)
1772
1773;; Further canonicalization when the constant is zero.
1774(define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1775  [(set (reg:<CC_EXTEND> CC_REGNUM)
1776	(compare:<CC_EXTEND>
1777	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1778	 (match_operand:DI 2 "arm_borrow_operation" "")))
1779   (clobber (match_scratch:SI 0 "=l,r"))]
1780  "TARGET_32BIT"
1781  "sbcs\\t%0, %1, #0"
1782  [(set_attr "conds" "set")
1783   (set_attr "type" "adc_imm")]
1784)
1785
1786(define_insn "*subsi3_carryin_const"
1787  [(set (match_operand:SI 0 "s_register_operand" "=r")
1788	(minus:SI (plus:SI
1789		   (match_operand:SI 1 "s_register_operand" "r")
1790		   (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1791		  (match_operand:SI 3 "arm_borrow_operation" "")))]
1792  "TARGET_32BIT"
1793  "sbc\\t%0, %1, #%n2"
1794  [(set_attr "conds" "use")
1795   (set_attr "type" "adc_imm")]
1796)
1797
1798(define_insn "*subsi3_carryin_const0"
1799  [(set (match_operand:SI 0 "s_register_operand" "=r")
1800	(minus:SI (match_operand:SI 1 "s_register_operand" "r")
1801		  (match_operand:SI 2 "arm_borrow_operation" "")))]
1802  "TARGET_32BIT"
1803  "sbc\\t%0, %1, #0"
1804  [(set_attr "conds" "use")
1805   (set_attr "type" "adc_imm")]
1806)
1807
1808(define_insn "*subsi3_carryin_shift"
1809  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1810	(minus:SI (minus:SI
1811		   (match_operand:SI 1 "s_register_operand" "r,r")
1812		   (match_operator:SI 2 "shift_operator"
1813		    [(match_operand:SI 3 "s_register_operand" "r,r")
1814		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1815		  (match_operand:SI 5 "arm_borrow_operation" "")))]
1816  "TARGET_32BIT"
1817  "sbc%?\\t%0, %1, %3%S2"
1818  [(set_attr "conds" "use")
1819   (set_attr "arch" "32,a")
1820   (set_attr "shift" "3")
1821   (set_attr "predicable" "yes")
1822   (set_attr "autodetect_type" "alu_shift_operator2")]
1823)
1824
1825(define_insn "*subsi3_carryin_shift_alt"
1826  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1827	(minus:SI (minus:SI
1828		   (match_operand:SI 1 "s_register_operand" "r,r")
1829		   (match_operand:SI 5 "arm_borrow_operation" ""))
1830		  (match_operator:SI 2 "shift_operator"
1831		   [(match_operand:SI 3 "s_register_operand" "r,r")
1832		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1833  "TARGET_32BIT"
1834  "sbc%?\\t%0, %1, %3%S2"
1835  [(set_attr "conds" "use")
1836   (set_attr "arch" "32,a")
1837   (set_attr "shift" "3")
1838   (set_attr "predicable" "yes")
1839   (set_attr "autodetect_type" "alu_shift_operator2")]
1840)
1841
1842;; No RSC in Thumb2
1843(define_insn "*rsbsi3_carryin_shift"
1844  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1845	(minus:SI (minus:SI
1846		   (match_operator:SI 2 "shift_operator"
1847		    [(match_operand:SI 3 "s_register_operand" "r,r")
1848		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
1849		   (match_operand:SI 1 "s_register_operand" "r,r"))
1850		  (match_operand:SI 5 "arm_borrow_operation" "")))]
1851  "TARGET_ARM"
1852  "rsc%?\\t%0, %1, %3%S2"
1853  [(set_attr "conds" "use")
1854   (set_attr "predicable" "yes")
1855   (set_attr "autodetect_type" "alu_shift_operator2")]
1856)
1857
1858(define_insn "*rsbsi3_carryin_shift_alt"
1859  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1860	(minus:SI (minus:SI
1861		   (match_operator:SI 2 "shift_operator"
1862		    [(match_operand:SI 3 "s_register_operand" "r,r")
1863		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
1864		    (match_operand:SI 5 "arm_borrow_operation" ""))
1865		  (match_operand:SI 1 "s_register_operand" "r,r")))]
1866  "TARGET_ARM"
1867  "rsc%?\\t%0, %1, %3%S2"
1868  [(set_attr "conds" "use")
1869   (set_attr "predicable" "yes")
1870   (set_attr "autodetect_type" "alu_shift_operator2")]
1871)
1872
1873; Transform ((x << y) - 1) into ~(~(x-1) << y), where x is a constant.
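; For example (illustrative only): with x = 4 and y = 3, (4 << 3) - 1 = 0x1f,
; and ~(~(4-1) << 3) = ~(0xfffffffc << 3) = ~0xffffffe0 = 0x1f, so the two
; forms agree.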
1874(define_split
1875  [(set (match_operand:SI 0 "s_register_operand" "")
1876	(plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1877			    (match_operand:SI 2 "s_register_operand" ""))
1878		 (const_int -1)))
1879   (clobber (match_operand:SI 3 "s_register_operand" ""))]
1880  "TARGET_32BIT"
1881  [(set (match_dup 3) (match_dup 1))
1882   (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1883  "
1884  operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1885")
1886
1887(define_expand "addsf3"
1888  [(set (match_operand:SF          0 "s_register_operand")
1889	(plus:SF (match_operand:SF 1 "s_register_operand")
1890		 (match_operand:SF 2 "s_register_operand")))]
1891  "TARGET_32BIT && TARGET_HARD_FLOAT"
1892  "
1893")
1894
1895(define_expand "adddf3"
1896  [(set (match_operand:DF          0 "s_register_operand")
1897	(plus:DF (match_operand:DF 1 "s_register_operand")
1898		 (match_operand:DF 2 "s_register_operand")))]
1899  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1900  "
1901")
1902
1903(define_expand "subdi3"
1904 [(parallel
1905   [(set (match_operand:DI            0 "s_register_operand")
1906	  (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1907		    (match_operand:DI 2 "s_register_operand")))
1908    (clobber (reg:CC CC_REGNUM))])]
1909  "TARGET_EITHER"
1910  "
1911  if (TARGET_THUMB1)
1912    {
1913      if (!REG_P (operands[1]))
1914	operands[1] = force_reg (DImode, operands[1]);
1915    }
1916  else
1917    {
1918      rtx lo_result, hi_result, lo_dest, hi_dest;
1919      rtx lo_op1, hi_op1, lo_op2, hi_op2;
1920      rtx condition;
1921
1922      /* Since operands[1] may be an integer, pass it second, so that
1923	 any necessary simplifications will be done on the decomposed
1924	 constant.  */
1925      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1926			      &lo_op1, &hi_op1);
1927      lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1928      hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1929
1930      if (!arm_rhs_operand (lo_op1, SImode))
1931	lo_op1 = force_reg (SImode, lo_op1);
1932
1933      if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1934	  || !arm_rhs_operand (hi_op1, SImode))
1935	hi_op1 = force_reg (SImode, hi_op1);
1936
1937      rtx cc_reg;
1938      if (lo_op1 == const0_rtx)
1939	{
1940	  cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1941	  emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1942	}
1943      else if (CONST_INT_P (lo_op1))
1944	{
1945	  cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1946	  emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1947					  GEN_INT (~UINTVAL (lo_op1))));
1948	}
1949      else
1950	{
1951	  cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1952	  emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1953	}
1954
1955      condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1956
1957      if (hi_op1 == const0_rtx)
1958        emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1959      else
1960	emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1961
1962      if (lo_result != lo_dest)
1963	emit_move_insn (lo_result, lo_dest);
1964
1965      if (hi_result != hi_dest)
1966	emit_move_insn (hi_result, hi_dest);
1967
1968      DONE;
1969    }
1970  "
1971)
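;; For the simple all-register case the sequence generated above is roughly
;; (an illustrative sketch only):
;;	subs	lo_dest, lo_op1, lo_op2
;;	sbc	hi_dest, hi_op1, hi_op2
;; with the flags from the low-part SUBS feeding the borrow into the SBC;
;; the zero and constant low-part cases use reverse-subtract forms instead.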
1972
1973(define_expand "subsi3"
1974  [(set (match_operand:SI           0 "s_register_operand")
1975	(minus:SI (match_operand:SI 1 "reg_or_int_operand")
1976		  (match_operand:SI 2 "s_register_operand")))]
1977  "TARGET_EITHER"
1978  "
1979  if (CONST_INT_P (operands[1]))
1980    {
1981      if (TARGET_32BIT)
1982        {
1983	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1984	    operands[1] = force_reg (SImode, operands[1]);
1985	  else
1986	    {
1987	      arm_split_constant (MINUS, SImode, NULL_RTX,
1988				  INTVAL (operands[1]), operands[0],
1989				  operands[2],
1990				  optimize && can_create_pseudo_p ());
1991	      DONE;
1992	    }
1993	}
1994      else /* TARGET_THUMB1 */
1995        operands[1] = force_reg (SImode, operands[1]);
1996    }
1997  "
1998)
1999
2000; ??? Check Thumb-2 split length
2001(define_insn_and_split "*arm_subsi3_insn"
2002  [(set (match_operand:SI           0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
2003	(minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
2004		  (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
2005  "TARGET_32BIT"
2006  "@
2007   sub%?\\t%0, %1, %2
2008   sub%?\\t%0, %2
2009   sub%?\\t%0, %1, %2
2010   rsb%?\\t%0, %2, %1
2011   rsb%?\\t%0, %2, %1
2012   sub%?\\t%0, %1, %2
2013   sub%?\\t%0, %1, %2
2014   sub%?\\t%0, %1, %2
2015   #"
2016  "&& (CONST_INT_P (operands[1])
2017       && !const_ok_for_arm (INTVAL (operands[1])))"
2018  [(clobber (const_int 0))]
2019  "
2020  arm_split_constant (MINUS, SImode, curr_insn,
2021                      INTVAL (operands[1]), operands[0], operands[2], 0);
2022  DONE;
2023  "
2024  [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2025   (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2026   (set_attr "predicable" "yes")
2027   (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2028   (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2029)
2030
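;; If the constant in "dest = const - reg" cannot be encoded directly but its
;; bitwise complement can, load the constant into a scratch register first
;; (typically via MVN) and then use a plain register-register subtract.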
2031(define_peephole2
2032  [(match_scratch:SI 3 "r")
2033   (set (match_operand:SI 0 "arm_general_register_operand" "")
2034	(minus:SI (match_operand:SI 1 "const_int_operand" "")
2035		  (match_operand:SI 2 "arm_general_register_operand" "")))]
2036  "TARGET_32BIT
2037   && !const_ok_for_arm (INTVAL (operands[1]))
2038   && const_ok_for_arm (~INTVAL (operands[1]))"
2039  [(set (match_dup 3) (match_dup 1))
2040   (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2041  ""
2042)
2043
2044(define_insn "subsi3_compare0"
2045  [(set (reg:CC_NZ CC_REGNUM)
2046	(compare:CC_NZ
2047	 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2048		   (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2049	 (const_int 0)))
2050   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2051	(minus:SI (match_dup 1) (match_dup 2)))]
2052  "TARGET_32BIT"
2053  "@
2054   subs%?\\t%0, %1, %2
2055   subs%?\\t%0, %1, %2
2056   rsbs%?\\t%0, %2, %1"
2057  [(set_attr "conds" "set")
2058   (set_attr "type"  "alus_imm,alus_sreg,alus_sreg")]
2059)
2060
2061(define_insn "subsi3_compare"
2062  [(set (reg:CC CC_REGNUM)
2063	(compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2064		    (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2065   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2066	(minus:SI (match_dup 1) (match_dup 2)))]
2067  "TARGET_32BIT"
2068  "@
2069   subs%?\\t%0, %1, %2
2070   subs%?\\t%0, %1, %2
2071   rsbs%?\\t%0, %2, %1"
2072  [(set_attr "conds" "set")
2073   (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2074)
2075
2076;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2077;; rather than (0 cmp reg).  This gives the same results for unsigned
2078;; and equality compares, which is what we mostly need here.
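;; (In two's complement ~x = -1 - x, so ~reg - ~imm = imm - reg; comparing
;; ~reg against ~imm therefore sets the C and Z flags exactly as the reverse
;; subtract below does, which is all the unsigned and equality tests need.)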
2079(define_insn "rsb_imm_compare"
2080  [(set (reg:CC_RSB CC_REGNUM)
2081	(compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2082			(match_operand 3 "const_int_operand" "")))
2083   (set (match_operand:SI 0 "s_register_operand" "=r")
2084	(minus:SI (match_operand 1 "arm_immediate_operand" "I")
2085		  (match_dup 2)))]
2086  "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2087  "rsbs\\t%0, %2, %1"
2088  [(set_attr "conds" "set")
2089   (set_attr "type" "alus_imm")]
2090)
2091
2092;; Similarly, but the result is unused.
2093(define_insn "rsb_imm_compare_scratch"
2094  [(set (reg:CC_RSB CC_REGNUM)
2095	(compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2096			(match_operand 1 "arm_not_immediate_operand" "K")))
2097   (clobber (match_scratch:SI 0 "=r"))]
2098  "TARGET_32BIT"
2099  "rsbs\\t%0, %2, #%B1"
2100  [(set_attr "conds" "set")
2101   (set_attr "type" "alus_imm")]
2102)
2103
2104;; Compare the sum of a value plus a carry against a constant.  Uses
2105;; RSC, so the result is swapped.  Only available in Arm state.
2106(define_insn "rscsi3_<CC_EXTEND>out_scratch"
2107  [(set (reg:CC_SWP CC_REGNUM)
2108	(compare:CC_SWP
2109	 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2110		  (match_operand:DI 3 "arm_borrow_operation" ""))
2111	 (match_operand 1 "arm_immediate_operand" "I")))
2112   (clobber (match_scratch:SI 0 "=r"))]
2113  "TARGET_ARM"
2114  "rscs\\t%0, %2, %1"
2115  [(set_attr "conds" "set")
2116   (set_attr "type" "alus_imm")]
2117)
2118
2119(define_insn "usubvsi3_borrow"
2120  [(set (reg:CC_B CC_REGNUM)
2121	(compare:CC_B
2122	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2123	 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2124	          (zero_extend:DI
2125		   (match_operand:SI 2 "s_register_operand" "l,r")))))
2126   (set (match_operand:SI 0 "s_register_operand" "=l,r")
2127	(minus:SI (match_dup 1)
2128		  (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2129			   (match_dup 2))))]
2130  "TARGET_32BIT"
2131  "sbcs%?\\t%0, %1, %2"
2132  [(set_attr "conds" "set")
2133   (set_attr "arch" "t2,*")
2134   (set_attr "length" "2,4")]
2135)
2136
2137(define_insn "usubvsi3_borrow_imm"
2138  [(set (reg:CC_B CC_REGNUM)
2139	(compare:CC_B
2140	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2141	 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2142		  (match_operand:DI 3 "const_int_operand" "n,n"))))
2143   (set (match_operand:SI 0 "s_register_operand" "=r,r")
2144	(minus:SI (match_dup 1)
2145		  (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2146			   (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2147  "TARGET_32BIT
2148   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2149  "@
2150  sbcs%?\\t%0, %1, %2
2151  adcs%?\\t%0, %1, #%B2"
2152  [(set_attr "conds" "set")
2153   (set_attr "type" "alus_imm")]
2154)
2155
2156(define_insn "subvsi3_borrow"
2157  [(set (reg:CC_V CC_REGNUM)
2158	(compare:CC_V
2159	 (minus:DI
2160	  (minus:DI
2161	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2162	   (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2163	  (match_operand:DI 4 "arm_borrow_operation" ""))
2164	 (sign_extend:DI
2165	  (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2166		    (match_operand:SI 3 "arm_borrow_operation" "")))))
2167   (set (match_operand:SI 0 "s_register_operand" "=l,r")
2168	(minus:SI (minus:SI (match_dup 1) (match_dup 2))
2169		  (match_dup 3)))]
2170  "TARGET_32BIT"
2171  "sbcs%?\\t%0, %1, %2"
2172  [(set_attr "conds" "set")
2173   (set_attr "arch" "t2,*")
2174   (set_attr "length" "2,4")]
2175)
2176
2177(define_insn "subvsi3_borrow_imm"
2178  [(set (reg:CC_V CC_REGNUM)
2179	(compare:CC_V
2180	 (minus:DI
2181	  (minus:DI
2182	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2183	   (match_operand 2 "arm_adcimm_operand" "I,K"))
2184	  (match_operand:DI 4 "arm_borrow_operation" ""))
2185	 (sign_extend:DI
2186	  (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2187		    (match_operand:SI 3 "arm_borrow_operation" "")))))
2188   (set (match_operand:SI 0 "s_register_operand" "=r,r")
2189	(minus:SI (minus:SI (match_dup 1) (match_dup 2))
2190		  (match_dup 3)))]
2191  "TARGET_32BIT
2192   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2193  "@
2194  sbcs%?\\t%0, %1, %2
2195  adcs%?\\t%0, %1, #%B2"
2196  [(set_attr "conds" "set")
2197   (set_attr "type" "alus_imm")]
2198)
2199
2200(define_expand "subsf3"
2201  [(set (match_operand:SF           0 "s_register_operand")
2202	(minus:SF (match_operand:SF 1 "s_register_operand")
2203		  (match_operand:SF 2 "s_register_operand")))]
2204  "TARGET_32BIT && TARGET_HARD_FLOAT"
2205  "
2206")
2207
2208(define_expand "subdf3"
2209  [(set (match_operand:DF           0 "s_register_operand")
2210	(minus:DF (match_operand:DF 1 "s_register_operand")
2211		  (match_operand:DF 2 "s_register_operand")))]
2212  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2213  "
2214")
2215
2216
2217;; Multiplication insns
2218
2219(define_expand "mulhi3"
2220  [(set (match_operand:HI 0 "s_register_operand")
2221	(mult:HI (match_operand:HI 1 "s_register_operand")
2222		 (match_operand:HI 2 "s_register_operand")))]
2223  "TARGET_DSP_MULTIPLY"
2224  "
2225  {
2226    rtx result = gen_reg_rtx (SImode);
2227    emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2228    emit_move_insn (operands[0], gen_lowpart (HImode, result));
2229    DONE;
2230  }"
2231)
2232
2233(define_expand "mulsi3"
2234  [(set (match_operand:SI          0 "s_register_operand")
2235	(mult:SI (match_operand:SI 2 "s_register_operand")
2236		 (match_operand:SI 1 "s_register_operand")))]
2237  "TARGET_EITHER"
2238  ""
2239)
2240
2241;; Use `&' and then `0' to prevent operands 0 and 2 from being the same.
2242(define_insn "*mul"
2243  [(set (match_operand:SI          0 "s_register_operand" "=l,r,&r,&r")
2244	(mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2245		 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2246  "TARGET_32BIT"
2247  "mul%?\\t%0, %2, %1"
2248  [(set_attr "type" "mul")
2249   (set_attr "predicable" "yes")
2250   (set_attr "arch" "t2,v6,nov6,nov6")
2251   (set_attr "length" "4")
2252   (set_attr "predicable_short_it" "yes,no,*,*")]
2253)
2254
2255;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
2256;; reusing the same register.
2257
2258(define_insn "*mla"
2259  [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2260	(plus:SI
2261	  (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2262		   (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2263	  (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2264  "TARGET_32BIT"
2265  "mla%?\\t%0, %3, %2, %1"
2266  [(set_attr "type" "mla")
2267   (set_attr "predicable" "yes")
2268   (set_attr "arch" "v6,nov6,nov6,nov6")]
2269)
2270
2271(define_insn "*mls"
2272  [(set (match_operand:SI 0 "s_register_operand" "=r")
2273	(minus:SI
2274	  (match_operand:SI 1 "s_register_operand" "r")
2275	  (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2276		   (match_operand:SI 2 "s_register_operand" "r"))))]
2277  "TARGET_32BIT && arm_arch_thumb2"
2278  "mls%?\\t%0, %3, %2, %1"
2279  [(set_attr "type" "mla")
2280   (set_attr "predicable" "yes")]
2281)
2282
2283(define_insn "*mulsi3_compare0"
2284  [(set (reg:CC_NZ CC_REGNUM)
2285	(compare:CC_NZ (mult:SI
2286			  (match_operand:SI 2 "s_register_operand" "r,r")
2287			  (match_operand:SI 1 "s_register_operand" "%0,r"))
2288			 (const_int 0)))
2289   (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2290	(mult:SI (match_dup 2) (match_dup 1)))]
2291  "TARGET_ARM && !arm_arch6"
2292  "muls%?\\t%0, %2, %1"
2293  [(set_attr "conds" "set")
2294   (set_attr "type" "muls")]
2295)
2296
2297(define_insn "*mulsi3_compare0_v6"
2298  [(set (reg:CC_NZ CC_REGNUM)
2299	(compare:CC_NZ (mult:SI
2300			  (match_operand:SI 2 "s_register_operand" "r")
2301			  (match_operand:SI 1 "s_register_operand" "r"))
2302			 (const_int 0)))
2303   (set (match_operand:SI 0 "s_register_operand" "=r")
2304	(mult:SI (match_dup 2) (match_dup 1)))]
2305  "TARGET_ARM && arm_arch6 && optimize_size"
2306  "muls%?\\t%0, %2, %1"
2307  [(set_attr "conds" "set")
2308   (set_attr "type" "muls")]
2309)
2310
2311(define_insn "*mulsi_compare0_scratch"
2312  [(set (reg:CC_NZ CC_REGNUM)
2313	(compare:CC_NZ (mult:SI
2314			  (match_operand:SI 2 "s_register_operand" "r,r")
2315			  (match_operand:SI 1 "s_register_operand" "%0,r"))
2316			 (const_int 0)))
2317   (clobber (match_scratch:SI 0 "=&r,&r"))]
2318  "TARGET_ARM && !arm_arch6"
2319  "muls%?\\t%0, %2, %1"
2320  [(set_attr "conds" "set")
2321   (set_attr "type" "muls")]
2322)
2323
2324(define_insn "*mulsi_compare0_scratch_v6"
2325  [(set (reg:CC_NZ CC_REGNUM)
2326	(compare:CC_NZ (mult:SI
2327			  (match_operand:SI 2 "s_register_operand" "r")
2328			  (match_operand:SI 1 "s_register_operand" "r"))
2329			 (const_int 0)))
2330   (clobber (match_scratch:SI 0 "=r"))]
2331  "TARGET_ARM && arm_arch6 && optimize_size"
2332  "muls%?\\t%0, %2, %1"
2333  [(set_attr "conds" "set")
2334   (set_attr "type" "muls")]
2335)
2336
2337(define_insn "*mulsi3addsi_compare0"
2338  [(set (reg:CC_NZ CC_REGNUM)
2339	(compare:CC_NZ
2340	 (plus:SI (mult:SI
2341		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2342		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2343		  (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2344	 (const_int 0)))
2345   (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2346	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
2347		 (match_dup 3)))]
2348  "TARGET_ARM && arm_arch6"
2349  "mlas%?\\t%0, %2, %1, %3"
2350  [(set_attr "conds" "set")
2351   (set_attr "type" "mlas")]
2352)
2353
2354(define_insn "*mulsi3addsi_compare0_v6"
2355  [(set (reg:CC_NZ CC_REGNUM)
2356	(compare:CC_NZ
2357	 (plus:SI (mult:SI
2358		   (match_operand:SI 2 "s_register_operand" "r")
2359		   (match_operand:SI 1 "s_register_operand" "r"))
2360		  (match_operand:SI 3 "s_register_operand" "r"))
2361	 (const_int 0)))
2362   (set (match_operand:SI 0 "s_register_operand" "=r")
2363	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
2364		 (match_dup 3)))]
2365  "TARGET_ARM && arm_arch6 && optimize_size"
2366  "mlas%?\\t%0, %2, %1, %3"
2367  [(set_attr "conds" "set")
2368   (set_attr "type" "mlas")]
2369)
2370
2371(define_insn "*mulsi3addsi_compare0_scratch"
2372  [(set (reg:CC_NZ CC_REGNUM)
2373	(compare:CC_NZ
2374	 (plus:SI (mult:SI
2375		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2376		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2377		  (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2378	 (const_int 0)))
2379   (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2380  "TARGET_ARM && !arm_arch6"
2381  "mlas%?\\t%0, %2, %1, %3"
2382  [(set_attr "conds" "set")
2383   (set_attr "type" "mlas")]
2384)
2385
2386(define_insn "*mulsi3addsi_compare0_scratch_v6"
2387  [(set (reg:CC_NZ CC_REGNUM)
2388	(compare:CC_NZ
2389	 (plus:SI (mult:SI
2390		   (match_operand:SI 2 "s_register_operand" "r")
2391		   (match_operand:SI 1 "s_register_operand" "r"))
2392		  (match_operand:SI 3 "s_register_operand" "r"))
2393	 (const_int 0)))
2394   (clobber (match_scratch:SI 0 "=r"))]
2395  "TARGET_ARM && arm_arch6 && optimize_size"
2396  "mlas%?\\t%0, %2, %1, %3"
2397  [(set_attr "conds" "set")
2398   (set_attr "type" "mlas")]
2399)
2400
2401;; 32x32->64 widening multiply.
2402;; The only difference between the v3-5 and v6+ versions is the requirement
2403;; that the output does not overlap with either input.
2404
2405(define_expand "<Us>mulsidi3"
2406  [(set (match_operand:DI 0 "s_register_operand")
2407	(mult:DI
2408	 (SE:DI (match_operand:SI 1 "s_register_operand"))
2409	 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2410  "TARGET_32BIT"
2411  {
2412      emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2413			       gen_highpart (SImode, operands[0]),
2414			       operands[1], operands[2]));
2415      DONE;
2416  }
2417)
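;; So a full 32x32->64 widening multiply is expected to come out as a single
;; UMULL/SMULL writing both halves of the destination, e.g. (illustrative
;; only) "umull r0, r1, r2, r3" with r0 the low and r1 the high word of
;; r2 * r3.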
2418
2419(define_insn "<US>mull"
2420  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2421	(mult:SI
2422	 (match_operand:SI 2 "s_register_operand" "%r,r")
2423	 (match_operand:SI 3 "s_register_operand" "r,r")))
2424   (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2425	(truncate:SI
2426	 (lshiftrt:DI
2427	  (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2428	  (const_int 32))))]
2429  "TARGET_32BIT"
2430  "<US>mull%?\\t%0, %1, %2, %3"
2431  [(set_attr "type" "umull")
2432   (set_attr "predicable" "yes")
2433   (set_attr "arch" "v6,nov6")]
2434)
2435
2436(define_expand "<Us>maddsidi4"
2437  [(set (match_operand:DI 0 "s_register_operand")
2438	(plus:DI
2439	 (mult:DI
2440	  (SE:DI (match_operand:SI 1 "s_register_operand"))
2441	  (SE:DI (match_operand:SI 2 "s_register_operand")))
2442	 (match_operand:DI 3 "s_register_operand")))]
2443  "TARGET_32BIT"
2444  {
2445      emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2446			       gen_lowpart (SImode, operands[3]),
2447			       gen_highpart (SImode, operands[0]),
2448			       gen_highpart (SImode, operands[3]),
2449			       operands[1], operands[2]));
2450      DONE;
2451  }
2452)
2453
2454(define_insn "<US>mlal"
2455  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2456	(plus:SI
2457	 (mult:SI
2458	  (match_operand:SI 4 "s_register_operand" "%r,r")
2459	  (match_operand:SI 5 "s_register_operand" "r,r"))
2460	 (match_operand:SI 1 "s_register_operand" "0,0")))
2461   (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2462	(plus:SI
2463	 (truncate:SI
2464	  (lshiftrt:DI
2465	   (plus:DI
2466	    (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2467	    (zero_extend:DI (match_dup 1)))
2468	   (const_int 32)))
2469	 (match_operand:SI 3 "s_register_operand" "2,2")))]
2470  "TARGET_32BIT"
2471  "<US>mlal%?\\t%0, %2, %4, %5"
2472  [(set_attr "type" "umlal")
2473   (set_attr "predicable" "yes")
2474   (set_attr "arch" "v6,nov6")]
2475)
2476
2477(define_expand "<US>mulsi3_highpart"
2478  [(parallel
2479    [(set (match_operand:SI 0 "s_register_operand")
2480	  (truncate:SI
2481	   (lshiftrt:DI
2482	    (mult:DI
2483	     (SE:DI (match_operand:SI 1 "s_register_operand"))
2484	     (SE:DI (match_operand:SI 2 "s_register_operand")))
2485	    (const_int 32))))
2486     (clobber (match_scratch:SI 3 ""))])]
2487  "TARGET_32BIT"
2488  ""
2489)
2490
2491(define_insn "*<US>mull_high"
2492  [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2493	(truncate:SI
2494	 (lshiftrt:DI
2495	  (mult:DI
2496	   (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2497	   (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2498	  (const_int 32))))
2499   (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2500  "TARGET_32BIT"
2501  "<US>mull%?\\t%3, %0, %2, %1"
2502  [(set_attr "type" "umull")
2503   (set_attr "predicable" "yes")
2504   (set_attr "arch" "v6,nov6,nov6")]
2505)
2506
2507(define_insn "mulhisi3"
2508  [(set (match_operand:SI 0 "s_register_operand" "=r")
2509	(mult:SI (sign_extend:SI
2510		  (match_operand:HI 1 "s_register_operand" "%r"))
2511		 (sign_extend:SI
2512		  (match_operand:HI 2 "s_register_operand" "r"))))]
2513  "TARGET_DSP_MULTIPLY"
2514  "smulbb%?\\t%0, %1, %2"
2515  [(set_attr "type" "smulxy")
2516   (set_attr "predicable" "yes")]
2517)
2518
2519(define_insn "*mulhisi3tb"
2520  [(set (match_operand:SI 0 "s_register_operand" "=r")
2521	(mult:SI (ashiftrt:SI
2522		  (match_operand:SI 1 "s_register_operand" "r")
2523		  (const_int 16))
2524		 (sign_extend:SI
2525		  (match_operand:HI 2 "s_register_operand" "r"))))]
2526  "TARGET_DSP_MULTIPLY"
2527  "smultb%?\\t%0, %1, %2"
2528  [(set_attr "type" "smulxy")
2529   (set_attr "predicable" "yes")]
2530)
2531
2532(define_insn "*mulhisi3bt"
2533  [(set (match_operand:SI 0 "s_register_operand" "=r")
2534	(mult:SI (sign_extend:SI
2535		  (match_operand:HI 1 "s_register_operand" "r"))
2536		 (ashiftrt:SI
2537		  (match_operand:SI 2 "s_register_operand" "r")
2538		  (const_int 16))))]
2539  "TARGET_DSP_MULTIPLY"
2540  "smulbt%?\\t%0, %1, %2"
2541  [(set_attr "type" "smulxy")
2542   (set_attr "predicable" "yes")]
2543)
2544
2545(define_insn "*mulhisi3tt"
2546  [(set (match_operand:SI 0 "s_register_operand" "=r")
2547	(mult:SI (ashiftrt:SI
2548		  (match_operand:SI 1 "s_register_operand" "r")
2549		  (const_int 16))
2550		 (ashiftrt:SI
2551		  (match_operand:SI 2 "s_register_operand" "r")
2552		  (const_int 16))))]
2553  "TARGET_DSP_MULTIPLY"
2554  "smultt%?\\t%0, %1, %2"
2555  [(set_attr "type" "smulxy")
2556   (set_attr "predicable" "yes")]
2557)
2558
2559(define_expand "maddhisi4"
2560  [(set (match_operand:SI 0 "s_register_operand")
2561	(plus:SI (mult:SI (sign_extend:SI
2562			   (match_operand:HI 1 "s_register_operand"))
2563			  (sign_extend:SI
2564			   (match_operand:HI 2 "s_register_operand")))
2565		 (match_operand:SI 3 "s_register_operand")))]
2566  "TARGET_DSP_MULTIPLY"
2567  {
2568    /* If this function reads the Q bit from ACLE intrinsics, break up the
2569       multiplication and accumulation, since an overflow during accumulation
2570       would clobber the Q flag.  */
2571    if (ARM_Q_BIT_READ)
2572      {
2573	rtx tmp = gen_reg_rtx (SImode);
2574	emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2575	emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2576	DONE;
2577      }
2578  }
2579)
2580
2581(define_insn "*arm_maddhisi4"
2582  [(set (match_operand:SI 0 "s_register_operand" "=r")
2583	(plus:SI (mult:SI (sign_extend:SI
2584			   (match_operand:HI 1 "s_register_operand" "r"))
2585			  (sign_extend:SI
2586			   (match_operand:HI 2 "s_register_operand" "r")))
2587		 (match_operand:SI 3 "s_register_operand" "r")))]
2588  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2589  "smlabb%?\\t%0, %1, %2, %3"
2590  [(set_attr "type" "smlaxy")
2591   (set_attr "predicable" "yes")]
2592)
2593
2594(define_insn "arm_smlabb_setq"
2595  [(set (match_operand:SI 0 "s_register_operand" "=r")
2596	(plus:SI (mult:SI (sign_extend:SI
2597			   (match_operand:HI 1 "s_register_operand" "r"))
2598			  (sign_extend:SI
2599			   (match_operand:HI 2 "s_register_operand" "r")))
2600		 (match_operand:SI 3 "s_register_operand" "r")))
2601   (set (reg:CC APSRQ_REGNUM)
2602	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2603  "TARGET_DSP_MULTIPLY"
2604  "smlabb%?\\t%0, %1, %2, %3"
2605  [(set_attr "type" "smlaxy")
2606   (set_attr "predicable" "yes")]
2607)
2608
2609(define_expand "arm_smlabb"
2610 [(match_operand:SI 0 "s_register_operand")
2611  (match_operand:SI 1 "s_register_operand")
2612  (match_operand:SI 2 "s_register_operand")
2613  (match_operand:SI 3 "s_register_operand")]
2614  "TARGET_DSP_MULTIPLY"
2615  {
2616    rtx mult1 = gen_lowpart (HImode, operands[1]);
2617    rtx mult2 = gen_lowpart (HImode, operands[2]);
2618    if (ARM_Q_BIT_READ)
2619      emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2620    else
2621      emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2622    DONE;
2623  }
2624)
2625
2626;; Note: there is no maddhisi4bt because this one is the canonical form.
2627(define_insn "maddhisi4tb"
2628  [(set (match_operand:SI 0 "s_register_operand" "=r")
2629	(plus:SI (mult:SI (ashiftrt:SI
2630			   (match_operand:SI 1 "s_register_operand" "r")
2631			   (const_int 16))
2632			  (sign_extend:SI
2633			   (match_operand:HI 2 "s_register_operand" "r")))
2634		 (match_operand:SI 3 "s_register_operand" "r")))]
2635  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2636  "smlatb%?\\t%0, %1, %2, %3"
2637  [(set_attr "type" "smlaxy")
2638   (set_attr "predicable" "yes")]
2639)
2640
2641(define_insn "arm_smlatb_setq"
2642  [(set (match_operand:SI 0 "s_register_operand" "=r")
2643	(plus:SI (mult:SI (ashiftrt:SI
2644			   (match_operand:SI 1 "s_register_operand" "r")
2645			   (const_int 16))
2646			  (sign_extend:SI
2647			   (match_operand:HI 2 "s_register_operand" "r")))
2648		 (match_operand:SI 3 "s_register_operand" "r")))
2649   (set (reg:CC APSRQ_REGNUM)
2650	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2651  "TARGET_DSP_MULTIPLY"
2652  "smlatb%?\\t%0, %1, %2, %3"
2653  [(set_attr "type" "smlaxy")
2654   (set_attr "predicable" "yes")]
2655)
2656
2657(define_expand "arm_smlatb"
2658 [(match_operand:SI 0 "s_register_operand")
2659  (match_operand:SI 1 "s_register_operand")
2660  (match_operand:SI 2 "s_register_operand")
2661  (match_operand:SI 3 "s_register_operand")]
2662  "TARGET_DSP_MULTIPLY"
2663  {
2664    rtx mult2 = gen_lowpart (HImode, operands[2]);
2665    if (ARM_Q_BIT_READ)
2666      emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2667				      mult2, operands[3]));
2668    else
2669      emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2670				  mult2, operands[3]));
2671    DONE;
2672  }
2673)
2674
2675(define_insn "maddhisi4tt"
2676  [(set (match_operand:SI 0 "s_register_operand" "=r")
2677	(plus:SI (mult:SI (ashiftrt:SI
2678			   (match_operand:SI 1 "s_register_operand" "r")
2679			   (const_int 16))
2680			  (ashiftrt:SI
2681			   (match_operand:SI 2 "s_register_operand" "r")
2682			   (const_int 16)))
2683		 (match_operand:SI 3 "s_register_operand" "r")))]
2684  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2685  "smlatt%?\\t%0, %1, %2, %3"
2686  [(set_attr "type" "smlaxy")
2687   (set_attr "predicable" "yes")]
2688)
2689
2690(define_insn "arm_smlatt_setq"
2691  [(set (match_operand:SI 0 "s_register_operand" "=r")
2692	(plus:SI (mult:SI (ashiftrt:SI
2693			   (match_operand:SI 1 "s_register_operand" "r")
2694			   (const_int 16))
2695			  (ashiftrt:SI
2696			   (match_operand:SI 2 "s_register_operand" "r")
2697			   (const_int 16)))
2698		 (match_operand:SI 3 "s_register_operand" "r")))
2699   (set (reg:CC APSRQ_REGNUM)
2700	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2701  "TARGET_DSP_MULTIPLY"
2702  "smlatt%?\\t%0, %1, %2, %3"
2703  [(set_attr "type" "smlaxy")
2704   (set_attr "predicable" "yes")]
2705)
2706
2707(define_expand "arm_smlatt"
2708 [(match_operand:SI 0 "s_register_operand")
2709  (match_operand:SI 1 "s_register_operand")
2710  (match_operand:SI 2 "s_register_operand")
2711  (match_operand:SI 3 "s_register_operand")]
2712  "TARGET_DSP_MULTIPLY"
2713  {
2714    if (ARM_Q_BIT_READ)
2715      emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2716				      operands[2], operands[3]));
2717    else
2718      emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2719				  operands[2], operands[3]));
2720    DONE;
2721  }
2722)
2723
2724(define_insn "maddhidi4"
2725  [(set (match_operand:DI 0 "s_register_operand" "=r")
2726	(plus:DI
2727	  (mult:DI (sign_extend:DI
2728		    (match_operand:HI 1 "s_register_operand" "r"))
2729		   (sign_extend:DI
2730		    (match_operand:HI 2 "s_register_operand" "r")))
2731	  (match_operand:DI 3 "s_register_operand" "0")))]
2732  "TARGET_DSP_MULTIPLY"
2733  "smlalbb%?\\t%Q0, %R0, %1, %2"
2734  [(set_attr "type" "smlalxy")
2735   (set_attr "predicable" "yes")])
2736
2737;; Note: there is no maddhidi4bt because this one is the canonical form.
2738(define_insn "*maddhidi4tb"
2739  [(set (match_operand:DI 0 "s_register_operand" "=r")
2740	(plus:DI
2741	  (mult:DI (sign_extend:DI
2742		    (ashiftrt:SI
2743		     (match_operand:SI 1 "s_register_operand" "r")
2744		     (const_int 16)))
2745		   (sign_extend:DI
2746		    (match_operand:HI 2 "s_register_operand" "r")))
2747	  (match_operand:DI 3 "s_register_operand" "0")))]
2748  "TARGET_DSP_MULTIPLY"
2749  "smlaltb%?\\t%Q0, %R0, %1, %2"
2750  [(set_attr "type" "smlalxy")
2751   (set_attr "predicable" "yes")])
2752
2753(define_insn "*maddhidi4tt"
2754  [(set (match_operand:DI 0 "s_register_operand" "=r")
2755	(plus:DI
2756	  (mult:DI (sign_extend:DI
2757		    (ashiftrt:SI
2758		     (match_operand:SI 1 "s_register_operand" "r")
2759		     (const_int 16)))
2760		   (sign_extend:DI
2761		    (ashiftrt:SI
2762		     (match_operand:SI 2 "s_register_operand" "r")
2763		     (const_int 16))))
2764	  (match_operand:DI 3 "s_register_operand" "0")))]
2765  "TARGET_DSP_MULTIPLY"
2766  "smlaltt%?\\t%Q0, %R0, %1, %2"
2767  [(set_attr "type" "smlalxy")
2768   (set_attr "predicable" "yes")])
2769
2770(define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2771  [(set (match_operand:SI 0 "s_register_operand" "=r")
2772	(unspec:SI
2773	   [(match_operand:SI 1 "s_register_operand" "r")
2774	    (match_operand:SI 2 "s_register_operand" "r")
2775	    (match_operand:SI 3 "s_register_operand" "r")]
2776	   SMLAWBT))]
2777  "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2778  "<smlaw_op>%?\\t%0, %1, %2, %3"
2779  [(set_attr "type" "smlaxy")
2780   (set_attr "predicable" "yes")]
2781)
2782
2783(define_expand "arm_<smlaw_op>"
2784  [(set (match_operand:SI 0 "s_register_operand")
2785	(unspec:SI
2786	   [(match_operand:SI 1 "s_register_operand")
2787	    (match_operand:SI 2 "s_register_operand")
2788	    (match_operand:SI 3 "s_register_operand")]
2789	   SMLAWBT))]
2790  "TARGET_DSP_MULTIPLY"
2791  {
2792    if (ARM_Q_BIT_READ)
2793      emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2794					       operands[2], operands[3]));
2795    else
2796      emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2797					  operands[2], operands[3]));
2798    DONE;
2799  }
2800)
2801
2802(define_expand "mulsf3"
2803  [(set (match_operand:SF          0 "s_register_operand")
2804	(mult:SF (match_operand:SF 1 "s_register_operand")
2805		 (match_operand:SF 2 "s_register_operand")))]
2806  "TARGET_32BIT && TARGET_HARD_FLOAT"
2807  "
2808")
2809
2810(define_expand "muldf3"
2811  [(set (match_operand:DF          0 "s_register_operand")
2812	(mult:DF (match_operand:DF 1 "s_register_operand")
2813		 (match_operand:DF 2 "s_register_operand")))]
2814  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2815  "
2816")
2817
2818;; Division insns
2819
2820(define_expand "divsf3"
2821  [(set (match_operand:SF 0 "s_register_operand")
2822	(div:SF (match_operand:SF 1 "s_register_operand")
2823		(match_operand:SF 2 "s_register_operand")))]
2824  "TARGET_32BIT && TARGET_HARD_FLOAT"
2825  "")
2826
2827(define_expand "divdf3"
2828  [(set (match_operand:DF 0 "s_register_operand")
2829	(div:DF (match_operand:DF 1 "s_register_operand")
2830		(match_operand:DF 2 "s_register_operand")))]
2831  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2832  "")
2833
2834
2835; Expand logical operations.  The mid-end expander does not split off memory
2836; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2837; So an explicit expander is needed to generate better code.
2838
2839(define_expand "<LOGICAL:optab>di3"
2840  [(set (match_operand:DI	  0 "s_register_operand")
2841	(LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2842		    (match_operand:DI 2 "arm_<optab>di_operand")))]
2843  "TARGET_32BIT"
2844  {
2845      rtx low  = simplify_gen_binary (<CODE>, SImode,
2846				      gen_lowpart (SImode, operands[1]),
2847				      gen_lowpart (SImode, operands[2]));
2848      rtx high = simplify_gen_binary (<CODE>, SImode,
2849				      gen_highpart (SImode, operands[1]),
2850				      gen_highpart_mode (SImode, DImode,
2851							 operands[2]));
2852
2853      emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2854      emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2855      DONE;
2856  }
2857)
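;; In other words, a DImode AND/IOR/XOR is expanded immediately as two
;; independent SImode operations, one on the low words and one on the high
;; words; e.g. a 64-bit AND becomes roughly (illustrative only):
;;	and	lo_dst, lo_src1, lo_src2
;;	and	hi_dst, hi_src1, hi_src2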
2858
2859(define_expand "one_cmpldi2"
2860  [(set (match_operand:DI 0 "s_register_operand")
2861	(not:DI (match_operand:DI 1 "s_register_operand")))]
2862  "TARGET_32BIT"
2863  {
2864      rtx low  = simplify_gen_unary (NOT, SImode,
2865				     gen_lowpart (SImode, operands[1]),
2866				     SImode);
2867      rtx high = simplify_gen_unary (NOT, SImode,
2868				     gen_highpart_mode (SImode, DImode,
2869							operands[1]),
2870				     SImode);
2871
2872      emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2873      emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2874      DONE;
2875  }
2876)
2877
2878;; Split DImode and, ior, xor operations.  Simply perform the logical
2879;; operation on the upper and lower halves of the registers.
2880;; This is needed for atomic operations in arm_split_atomic_op.
2881;; Avoid splitting IWMMXT instructions.
2882(define_split
2883  [(set (match_operand:DI 0 "s_register_operand" "")
2884	(match_operator:DI 6 "logical_binary_operator"
2885	  [(match_operand:DI 1 "s_register_operand" "")
2886	   (match_operand:DI 2 "s_register_operand" "")]))]
2887  "TARGET_32BIT && reload_completed
2888   && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2889  [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2890   (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2891  "
2892  {
2893    operands[3] = gen_highpart (SImode, operands[0]);
2894    operands[0] = gen_lowpart (SImode, operands[0]);
2895    operands[4] = gen_highpart (SImode, operands[1]);
2896    operands[1] = gen_lowpart (SImode, operands[1]);
2897    operands[5] = gen_highpart (SImode, operands[2]);
2898    operands[2] = gen_lowpart (SImode, operands[2]);
2899  }"
2900)
2901
2902;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2903;; Unconditionally split since there is no SIMD DImode NOT pattern.
2904(define_split
2905  [(set (match_operand:DI 0 "s_register_operand")
2906	(not:DI (match_operand:DI 1 "s_register_operand")))]
2907  "TARGET_32BIT"
2908  [(set (match_dup 0) (not:SI (match_dup 1)))
2909   (set (match_dup 2) (not:SI (match_dup 3)))]
2910  "
2911  {
2912    operands[2] = gen_highpart (SImode, operands[0]);
2913    operands[0] = gen_lowpart (SImode, operands[0]);
2914    operands[3] = gen_highpart (SImode, operands[1]);
2915    operands[1] = gen_lowpart (SImode, operands[1]);
2916  }"
2917)
2918
2919(define_expand "andsi3"
2920  [(set (match_operand:SI         0 "s_register_operand")
2921	(and:SI (match_operand:SI 1 "s_register_operand")
2922		(match_operand:SI 2 "reg_or_int_operand")))]
2923  "TARGET_EITHER"
2924  "
2925  if (TARGET_32BIT)
2926    {
2927      if (CONST_INT_P (operands[2]))
2928        {
2929	  if (INTVAL (operands[2]) == 255 && arm_arch6)
2930	    {
2931	      operands[1] = convert_to_mode (QImode, operands[1], 1);
2932	      emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2933							 operands[1]));
2934	      DONE;
2935	    }
2936	  else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2937	    operands[2] = force_reg (SImode, operands[2]);
2938	  else
2939	    {
2940	      arm_split_constant (AND, SImode, NULL_RTX,
2941				  INTVAL (operands[2]), operands[0],
2942				  operands[1],
2943				  optimize && can_create_pseudo_p ());
2944
2945	      DONE;
2946	    }
2947        }
2948    }
2949  else /* TARGET_THUMB1 */
2950    {
2951      if (!CONST_INT_P (operands[2]))
2952        {
2953          rtx tmp = force_reg (SImode, operands[2]);
2954	  if (rtx_equal_p (operands[0], operands[1]))
2955	    operands[2] = tmp;
2956	  else
2957	    {
2958              operands[2] = operands[1];
2959              operands[1] = tmp;
2960	    }
2961        }
2962      else
2963        {
2964          int i;
2965
2966          if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2967  	    {
2968	      operands[2] = force_reg (SImode,
2969				       GEN_INT (~INTVAL (operands[2])));
2970
2971	      emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2972
2973	      DONE;
2974	    }
2975
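          /* Look for a mask that is a contiguous run of low-order ones,
             (1 << i) - 1, which can be done with a single bit-field extract,
             or the complement of such a run, where the low bits can instead
             be cleared by a logical shift right followed by a shift left of
             the same amount.  */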
2976          for (i = 9; i <= 31; i++)
2977	    {
2978	      if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2979	        {
2980	          emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2981			 	        const0_rtx));
2982	          DONE;
2983	        }
2984	      else if ((HOST_WIDE_INT_1 << i) - 1
2985		       == ~INTVAL (operands[2]))
2986	        {
2987	          rtx shift = GEN_INT (i);
2988	          rtx reg = gen_reg_rtx (SImode);
2989
2990	          emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2991	          emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2992
2993	          DONE;
2994	        }
2995	    }
2996
2997          operands[2] = force_reg (SImode, operands[2]);
2998        }
2999    }
3000  "
3001)
3002
3003; ??? Check split length for Thumb-2
3004(define_insn_and_split "*arm_andsi3_insn"
3005  [(set (match_operand:SI         0 "s_register_operand" "=r,l,r,r,r")
3006	(and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3007		(match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3008  "TARGET_32BIT"
3009  "@
3010   and%?\\t%0, %1, %2
3011   and%?\\t%0, %1, %2
3012   bic%?\\t%0, %1, #%B2
3013   and%?\\t%0, %1, %2
3014   #"
3015  "TARGET_32BIT
3016   && CONST_INT_P (operands[2])
3017   && !(const_ok_for_arm (INTVAL (operands[2]))
3018	|| const_ok_for_arm (~INTVAL (operands[2])))"
3019  [(clobber (const_int 0))]
3020  "
3021  arm_split_constant  (AND, SImode, curr_insn,
3022	               INTVAL (operands[2]), operands[0], operands[1], 0);
3023  DONE;
3024  "
3025  [(set_attr "length" "4,4,4,4,16")
3026   (set_attr "predicable" "yes")
3027   (set_attr "predicable_short_it" "no,yes,no,no,no")
3028   (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
3029)
3030
3031(define_insn "*andsi3_compare0"
3032  [(set (reg:CC_NZ CC_REGNUM)
3033	(compare:CC_NZ
3034	 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3035		 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3036	 (const_int 0)))
3037   (set (match_operand:SI          0 "s_register_operand" "=r,r,r")
3038	(and:SI (match_dup 1) (match_dup 2)))]
3039  "TARGET_32BIT"
3040  "@
3041   ands%?\\t%0, %1, %2
3042   bics%?\\t%0, %1, #%B2
3043   ands%?\\t%0, %1, %2"
3044  [(set_attr "conds" "set")
3045   (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3046)
3047
3048(define_insn "*andsi3_compare0_scratch"
3049  [(set (reg:CC_NZ CC_REGNUM)
3050	(compare:CC_NZ
3051	 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3052		 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3053	 (const_int 0)))
3054   (clobber (match_scratch:SI 2 "=X,r,X"))]
3055  "TARGET_32BIT"
3056  "@
3057   tst%?\\t%0, %1
3058   bics%?\\t%2, %0, #%B1
3059   tst%?\\t%0, %1"
3060  [(set_attr "conds" "set")
3061   (set_attr "type"  "logics_imm,logics_imm,logics_reg")]
3062)
3063
3064(define_insn "*zeroextractsi_compare0_scratch"
3065  [(set (reg:CC_NZ CC_REGNUM)
3066	(compare:CC_NZ (zero_extract:SI
3067			  (match_operand:SI 0 "s_register_operand" "r")
3068			  (match_operand 1 "const_int_operand" "n")
3069			  (match_operand 2 "const_int_operand" "n"))
3070			 (const_int 0)))]
3071  "TARGET_32BIT
3072  && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3073      && INTVAL (operands[1]) > 0
3074      && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3075      && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3076  "*
3077  operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3078			 << INTVAL (operands[2]));
3079  output_asm_insn (\"tst%?\\t%0, %1\", operands);
3080  return \"\";
3081  "
3082  [(set_attr "conds" "set")
3083   (set_attr "predicable" "yes")
3084   (set_attr "type" "logics_imm")]
3085)
3086
3087(define_insn_and_split "*ne_zeroextractsi"
3088  [(set (match_operand:SI 0 "s_register_operand" "=r")
3089	(ne:SI (zero_extract:SI
3090		(match_operand:SI 1 "s_register_operand" "r")
3091		(match_operand:SI 2 "const_int_operand" "n")
3092		(match_operand:SI 3 "const_int_operand" "n"))
3093	       (const_int 0)))
3094   (clobber (reg:CC CC_REGNUM))]
3095  "TARGET_32BIT
3096   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3097       && INTVAL (operands[2]) > 0
3098       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3099       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3100  "#"
3101  "TARGET_32BIT
3102   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3103       && INTVAL (operands[2]) > 0
3104       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3105       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3106  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3107		   (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3108				    (const_int 0)))
3109	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3110   (set (match_dup 0)
3111	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3112			 (match_dup 0) (const_int 1)))]
3113  "
3114  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3115			 << INTVAL (operands[3]));
3116  "
3117  [(set_attr "conds" "clob")
3118   (set (attr "length")
3119	(if_then_else (eq_attr "is_thumb" "yes")
3120		      (const_int 12)
3121		      (const_int 8)))
3122   (set_attr "type" "multiple")]
3123)
3124
3125(define_insn_and_split "*ne_zeroextractsi_shifted"
3126  [(set (match_operand:SI 0 "s_register_operand" "=r")
3127	(ne:SI (zero_extract:SI
3128		(match_operand:SI 1 "s_register_operand" "r")
3129		(match_operand:SI 2 "const_int_operand" "n")
3130		(const_int 0))
3131	       (const_int 0)))
3132   (clobber (reg:CC CC_REGNUM))]
3133  "TARGET_ARM"
3134  "#"
3135  "TARGET_ARM"
3136  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3137		   (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3138				    (const_int 0)))
3139	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3140   (set (match_dup 0)
3141	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3142			 (match_dup 0) (const_int 1)))]
3143  "
3144  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3145  "
3146  [(set_attr "conds" "clob")
3147   (set_attr "length" "8")
3148   (set_attr "type" "multiple")]
3149)
3150
3151(define_insn_and_split "*ite_ne_zeroextractsi"
3152  [(set (match_operand:SI 0 "s_register_operand" "=r")
3153	(if_then_else:SI (ne (zero_extract:SI
3154			      (match_operand:SI 1 "s_register_operand" "r")
3155			      (match_operand:SI 2 "const_int_operand" "n")
3156			      (match_operand:SI 3 "const_int_operand" "n"))
3157			     (const_int 0))
3158			 (match_operand:SI 4 "arm_not_operand" "rIK")
3159			 (const_int 0)))
3160   (clobber (reg:CC CC_REGNUM))]
3161  "TARGET_ARM
3162   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3163       && INTVAL (operands[2]) > 0
3164       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3165       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3166   && !reg_overlap_mentioned_p (operands[0], operands[4])"
3167  "#"
3168  "TARGET_ARM
3169   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3170       && INTVAL (operands[2]) > 0
3171       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3172       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3173   && !reg_overlap_mentioned_p (operands[0], operands[4])"
3174  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3175		   (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3176				    (const_int 0)))
3177	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3178   (set (match_dup 0)
3179	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3180			 (match_dup 0) (match_dup 4)))]
3181  "
3182  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3183			 << INTVAL (operands[3]));
3184  "
3185  [(set_attr "conds" "clob")
3186   (set_attr "length" "8")
3187   (set_attr "type" "multiple")]
3188)
3189
3190(define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3191  [(set (match_operand:SI 0 "s_register_operand" "=r")
3192	(if_then_else:SI (ne (zero_extract:SI
3193			      (match_operand:SI 1 "s_register_operand" "r")
3194			      (match_operand:SI 2 "const_int_operand" "n")
3195			      (const_int 0))
3196			     (const_int 0))
3197			 (match_operand:SI 3 "arm_not_operand" "rIK")
3198			 (const_int 0)))
3199   (clobber (reg:CC CC_REGNUM))]
3200  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3201  "#"
3202  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3203  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3204		   (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3205				    (const_int 0)))
3206	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3207   (set (match_dup 0)
3208	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3209			 (match_dup 0) (match_dup 3)))]
3210  "
3211  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3212  "
3213  [(set_attr "conds" "clob")
3214   (set_attr "length" "8")
3215   (set_attr "type" "multiple")]
3216)
3217
;; ??? Thumb-2 has bitfield insert/extract instructions that could be used here.
3219(define_split
3220  [(set (match_operand:SI 0 "s_register_operand" "")
3221	(match_operator:SI 1 "shiftable_operator"
3222	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3223			   (match_operand:SI 3 "const_int_operand" "")
3224			   (match_operand:SI 4 "const_int_operand" ""))
3225	  (match_operand:SI 5 "s_register_operand" "")]))
3226   (clobber (match_operand:SI 6 "s_register_operand" ""))]
3227  "TARGET_ARM"
3228  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3229   (set (match_dup 0)
3230	(match_op_dup 1
3231	 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3232	  (match_dup 5)]))]
3233  "{
3234     HOST_WIDE_INT temp = INTVAL (operands[3]);
3235
3236     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3237     operands[4] = GEN_INT (32 - temp);
3238   }"
3239)
3240
3241(define_split
3242  [(set (match_operand:SI 0 "s_register_operand" "")
3243	(match_operator:SI 1 "shiftable_operator"
3244	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3245			   (match_operand:SI 3 "const_int_operand" "")
3246			   (match_operand:SI 4 "const_int_operand" ""))
3247	  (match_operand:SI 5 "s_register_operand" "")]))
3248   (clobber (match_operand:SI 6 "s_register_operand" ""))]
3249  "TARGET_ARM"
3250  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3251   (set (match_dup 0)
3252	(match_op_dup 1
3253	 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3254	  (match_dup 5)]))]
3255  "{
3256     HOST_WIDE_INT temp = INTVAL (operands[3]);
3257
3258     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3259     operands[4] = GEN_INT (32 - temp);
3260   }"
3261)
3262
3263;;; ??? This pattern is bogus.  If operand3 has bits outside the range
3264;;; represented by the bitfield, then this will produce incorrect results.
3265;;; Somewhere, the value needs to be truncated.  On targets like the m68k,
3266;;; which have a real bit-field insert instruction, the truncation happens
3267;;; in the bit-field insert instruction itself.  Since arm does not have a
3268;;; bit-field insert instruction, we would have to emit code here to truncate
3269;;; the value before we insert.  This loses some of the advantage of having
;;; this insv pattern, so this pattern needs to be reevaluated.
3271
3272(define_expand "insv"
3273  [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3274                      (match_operand 1 "general_operand")
3275                      (match_operand 2 "general_operand"))
3276        (match_operand 3 "reg_or_int_operand"))]
3277  "TARGET_ARM || arm_arch_thumb2"
3278  "
3279  {
3280    int start_bit = INTVAL (operands[2]);
3281    int width = INTVAL (operands[1]);
3282    HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3283    rtx target, subtarget;
3284
3285    if (arm_arch_thumb2)
3286      {
3287        if (unaligned_access && MEM_P (operands[0])
3288	    && s_register_operand (operands[3], GET_MODE (operands[3]))
3289	    && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3290	  {
3291	    rtx base_addr;
3292
3293	    if (BYTES_BIG_ENDIAN)
3294	      start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3295			  - start_bit;
3296
3297	    if (width == 32)
3298	      {
3299	        base_addr = adjust_address (operands[0], SImode,
3300					    start_bit / BITS_PER_UNIT);
3301		emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3302	      }
3303	    else
3304	      {
3305	        rtx tmp = gen_reg_rtx (HImode);
3306
3307	        base_addr = adjust_address (operands[0], HImode,
3308					    start_bit / BITS_PER_UNIT);
3309		emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3310		emit_insn (gen_unaligned_storehi (base_addr, tmp));
3311	      }
3312	    DONE;
3313	  }
3314	else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3315	  {
3316	    bool use_bfi = TRUE;
3317
3318	    if (CONST_INT_P (operands[3]))
3319	      {
3320		HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3321
3322		if (val == 0)
3323		  {
3324		    emit_insn (gen_insv_zero (operands[0], operands[1],
3325					      operands[2]));
3326		    DONE;
3327		  }
3328
3329		/* See if the set can be done with a single orr instruction.  */
3330		if (val == mask && const_ok_for_arm (val << start_bit))
3331		  use_bfi = FALSE;
3332	      }
3333
3334	    if (use_bfi)
3335	      {
3336		if (!REG_P (operands[3]))
3337		  operands[3] = force_reg (SImode, operands[3]);
3338
3339		emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3340					operands[3]));
3341		DONE;
3342	      }
3343	  }
3344	else
3345	  FAIL;
3346      }
3347
3348    if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3349      FAIL;
3350
3351    target = copy_rtx (operands[0]);
3352    /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3353       subreg as the final target.  */
3354    if (GET_CODE (target) == SUBREG)
3355      {
3356	subtarget = gen_reg_rtx (SImode);
3357	if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3358	    < GET_MODE_SIZE (SImode))
3359	  target = SUBREG_REG (target);
3360      }
3361    else
3362      subtarget = target;
3363
3364    if (CONST_INT_P (operands[3]))
3365      {
3366	/* Since we are inserting a known constant, we may be able to
3367	   reduce the number of bits that we have to clear so that
3368	   the mask becomes simple.  */
3369	/* ??? This code does not check to see if the new mask is actually
3370	   simpler.  It may not be.  */
3371	rtx op1 = gen_reg_rtx (SImode);
3372	/* ??? Truncate operand3 to fit in the bitfield.  See comment before
3373	   start of this pattern.  */
3374	HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3375	HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3376
3377	emit_insn (gen_andsi3 (op1, operands[0],
3378			       gen_int_mode (~mask2, SImode)));
3379	emit_insn (gen_iorsi3 (subtarget, op1,
3380			       gen_int_mode (op3_value << start_bit, SImode)));
3381      }
3382    else if (start_bit == 0
3383	     && !(const_ok_for_arm (mask)
3384		  || const_ok_for_arm (~mask)))
3385      {
	/* A trick: since we are setting the bottom bits of the word, we can
	   shift operand[3] up, shift operand[0] down, OR them together and
	   rotate the result back again.  This takes 3 insns, and the third
	   might be mergeable into another op.  */
	/* The shift up copes with the possibility that operand[3] is
	   wider than the bitfield.  */
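	/* Worked example (illustrative values only): for width == 12 and
	   start_bit == 0 this emits
	     op0 = operands[3] << 20;  op1 = operands[0] >> 12;
	     op1 |= op0;               result = rotate-left (op1, 12);
	   which leaves the inserted field in bits [11:0] and the original
	   upper 20 bits unchanged.  */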
3392	rtx op0 = gen_reg_rtx (SImode);
3393	rtx op1 = gen_reg_rtx (SImode);
3394
3395	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3396	emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3397	emit_insn (gen_iorsi3  (op1, op1, op0));
3398	emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3399      }
3400    else if ((width + start_bit == 32)
3401	     && !(const_ok_for_arm (mask)
3402		  || const_ok_for_arm (~mask)))
3403      {
3404	/* Similar trick, but slightly less efficient.  */
3405
3406	rtx op0 = gen_reg_rtx (SImode);
3407	rtx op1 = gen_reg_rtx (SImode);
3408
3409	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3410	emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3411	emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3412	emit_insn (gen_iorsi3 (subtarget, op1, op0));
3413      }
3414    else
3415      {
3416	rtx op0 = gen_int_mode (mask, SImode);
3417	rtx op1 = gen_reg_rtx (SImode);
3418	rtx op2 = gen_reg_rtx (SImode);
3419
3420	if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3421	  {
3422	    rtx tmp = gen_reg_rtx (SImode);
3423
3424	    emit_insn (gen_movsi (tmp, op0));
3425	    op0 = tmp;
3426	  }
3427
	/* Mask out any bits in operand[3] that are not needed.  */
	emit_insn (gen_andsi3 (op1, operands[3], op0));
3430
3431	if (CONST_INT_P (op0)
3432	    && (const_ok_for_arm (mask << start_bit)
3433		|| const_ok_for_arm (~(mask << start_bit))))
3434	  {
3435	    op0 = gen_int_mode (~(mask << start_bit), SImode);
3436	    emit_insn (gen_andsi3 (op2, operands[0], op0));
3437	  }
3438	else
3439	  {
3440	    if (CONST_INT_P (op0))
3441	      {
3442		rtx tmp = gen_reg_rtx (SImode);
3443
3444		emit_insn (gen_movsi (tmp, op0));
3445		op0 = tmp;
3446	      }
3447
3448	    if (start_bit != 0)
3449	      emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3450
3451	    emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3452	  }
3453
3454	if (start_bit != 0)
3455          emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3456
3457	emit_insn (gen_iorsi3 (subtarget, op1, op2));
3458      }
3459
3460    if (subtarget != target)
3461      {
3462	/* If TARGET is still a SUBREG, then it must be wider than a word,
3463	   so we must be careful only to set the subword we were asked to.  */
3464	if (GET_CODE (target) == SUBREG)
3465	  emit_move_insn (target, subtarget);
3466	else
3467	  emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3468      }
3469
3470    DONE;
3471  }"
3472)
3473
3474(define_insn "insv_zero"
3475  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3476                         (match_operand:SI 1 "const_int_M_operand" "M")
3477                         (match_operand:SI 2 "const_int_M_operand" "M"))
3478        (const_int 0))]
3479  "arm_arch_thumb2"
3480  "bfc%?\t%0, %2, %1"
3481  [(set_attr "length" "4")
3482   (set_attr "predicable" "yes")
3483   (set_attr "type" "bfm")]
3484)
3485
3486(define_insn "insv_t2"
3487  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3488                         (match_operand:SI 1 "const_int_M_operand" "M")
3489                         (match_operand:SI 2 "const_int_M_operand" "M"))
3490        (match_operand:SI 3 "s_register_operand" "r"))]
3491  "arm_arch_thumb2"
3492  "bfi%?\t%0, %3, %2, %1"
3493  [(set_attr "length" "4")
3494   (set_attr "predicable" "yes")
3495   (set_attr "type" "bfm")]
3496)
3497
3498(define_insn "andsi_notsi_si"
3499  [(set (match_operand:SI 0 "s_register_operand" "=r")
3500	(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3501		(match_operand:SI 1 "s_register_operand" "r")))]
3502  "TARGET_32BIT"
3503  "bic%?\\t%0, %1, %2"
3504  [(set_attr "predicable" "yes")
3505   (set_attr "type" "logic_reg")]
3506)
3507
3508(define_insn "andsi_not_shiftsi_si"
3509  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3510	(and:SI (not:SI (match_operator:SI 4 "shift_operator"
3511			 [(match_operand:SI 2 "s_register_operand" "r,r")
3512			  (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3513		(match_operand:SI 1 "s_register_operand" "r,r")))]
3514  "TARGET_32BIT"
3515  "bic%?\\t%0, %1, %2%S4"
3516  [(set_attr "predicable" "yes")
3517   (set_attr "shift" "2")
3518   (set_attr "arch" "32,a")
3519   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3520)
3521
;; Shifted BICS pattern used to set up the CC status register without reusing
;; the BICS result.  The pattern restricts the Thumb-2 shift operand to an
;; immediate, since BICS on Thumb-2 does not support a shift by register.
3525(define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3526  [(set (reg:CC_NZ CC_REGNUM)
3527	(compare:CC_NZ
3528		(and:SI (not:SI (match_operator:SI 0 "shift_operator"
3529			[(match_operand:SI 1 "s_register_operand" "r,r")
3530			 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3531			(match_operand:SI 3 "s_register_operand" "r,r"))
3532		(const_int 0)))
3533   (clobber (match_scratch:SI 4 "=r,r"))]
3534  "TARGET_32BIT"
3535  "bics%?\\t%4, %3, %1%S0"
3536  [(set_attr "predicable" "yes")
3537   (set_attr "arch" "32,a")
3538   (set_attr "conds" "set")
3539   (set_attr "shift" "1")
3540   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3541)
3542
;; Same as andsi_not_shiftsi_si_scc_no_reuse, but here the BICS result is
;; also used later.
3545(define_insn "andsi_not_shiftsi_si_scc"
3546  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3547	(compare:CC_NZ
3548		(and:SI (not:SI (match_operator:SI 0 "shift_operator"
3549			[(match_operand:SI 1 "s_register_operand" "r,r")
3550			 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3551			(match_operand:SI 3 "s_register_operand" "r,r"))
3552		(const_int 0)))
3553	(set (match_operand:SI 4 "s_register_operand" "=r,r")
3554	     (and:SI (not:SI (match_op_dup 0
3555		     [(match_dup 1)
3556		      (match_dup 2)]))
3557		     (match_dup 3)))])]
3558  "TARGET_32BIT"
3559  "bics%?\\t%4, %3, %1%S0"
3560  [(set_attr "predicable" "yes")
3561   (set_attr "arch" "32,a")
3562   (set_attr "conds" "set")
3563   (set_attr "shift" "1")
3564   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3565)
3566
3567(define_insn "*andsi_notsi_si_compare0"
3568  [(set (reg:CC_NZ CC_REGNUM)
3569	(compare:CC_NZ
3570	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3571		 (match_operand:SI 1 "s_register_operand" "r"))
3572	 (const_int 0)))
3573   (set (match_operand:SI 0 "s_register_operand" "=r")
3574	(and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3575  "TARGET_32BIT"
3576  "bics\\t%0, %1, %2"
3577  [(set_attr "conds" "set")
3578   (set_attr "type" "logics_shift_reg")]
3579)
3580
3581(define_insn "*andsi_notsi_si_compare0_scratch"
3582  [(set (reg:CC_NZ CC_REGNUM)
3583	(compare:CC_NZ
3584	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3585		 (match_operand:SI 1 "s_register_operand" "r"))
3586	 (const_int 0)))
3587   (clobber (match_scratch:SI 0 "=r"))]
3588  "TARGET_32BIT"
3589  "bics\\t%0, %1, %2"
3590  [(set_attr "conds" "set")
3591   (set_attr "type" "logics_shift_reg")]
3592)
3593
3594(define_expand "iorsi3"
3595  [(set (match_operand:SI         0 "s_register_operand")
3596	(ior:SI (match_operand:SI 1 "s_register_operand")
3597		(match_operand:SI 2 "reg_or_int_operand")))]
3598  "TARGET_EITHER"
3599  "
3600  if (CONST_INT_P (operands[2]))
3601    {
3602      if (TARGET_32BIT)
3603        {
3604	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3605	    operands[2] = force_reg (SImode, operands[2]);
3606	  else
3607	    {
3608	      arm_split_constant (IOR, SImode, NULL_RTX,
3609				  INTVAL (operands[2]), operands[0],
3610				  operands[1],
3611				  optimize && can_create_pseudo_p ());
3612	      DONE;
3613	    }
3614	}
3615      else /* TARGET_THUMB1 */
3616        {
3617          rtx tmp = force_reg (SImode, operands[2]);
3618	  if (rtx_equal_p (operands[0], operands[1]))
3619	    operands[2] = tmp;
3620	  else
3621	    {
3622              operands[2] = operands[1];
3623              operands[1] = tmp;
3624	    }
3625        }
3626    }
3627  "
3628)
3629
3630(define_insn_and_split "*iorsi3_insn"
3631  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3632	(ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3633		(match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3634  "TARGET_32BIT"
3635  "@
3636   orr%?\\t%0, %1, %2
3637   orr%?\\t%0, %1, %2
3638   orn%?\\t%0, %1, #%B2
3639   orr%?\\t%0, %1, %2
3640   #"
3641  "TARGET_32BIT
3642   && CONST_INT_P (operands[2])
3643   && !(const_ok_for_arm (INTVAL (operands[2]))
3644        || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3645  [(clobber (const_int 0))]
3646{
3647  arm_split_constant (IOR, SImode, curr_insn,
3648                      INTVAL (operands[2]), operands[0], operands[1], 0);
3649  DONE;
3650}
3651  [(set_attr "length" "4,4,4,4,16")
3652   (set_attr "arch" "32,t2,t2,32,32")
3653   (set_attr "predicable" "yes")
3654   (set_attr "predicable_short_it" "no,yes,no,no,no")
3655   (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3656)
3657
3658(define_peephole2
3659  [(match_scratch:SI 3 "r")
3660   (set (match_operand:SI 0 "arm_general_register_operand" "")
3661	(ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3662		(match_operand:SI 2 "const_int_operand" "")))]
3663  "TARGET_ARM
3664   && !const_ok_for_arm (INTVAL (operands[2]))
3665   && const_ok_for_arm (~INTVAL (operands[2]))"
3666  [(set (match_dup 3) (match_dup 2))
3667   (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3668  ""
3669)
3670
3671(define_insn "*iorsi3_compare0"
3672  [(set (reg:CC_NZ CC_REGNUM)
3673	(compare:CC_NZ
3674	 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3675		 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3676	 (const_int 0)))
3677   (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3678	(ior:SI (match_dup 1) (match_dup 2)))]
3679  "TARGET_32BIT"
3680  "orrs%?\\t%0, %1, %2"
3681  [(set_attr "conds" "set")
3682   (set_attr "arch" "*,t2,*")
3683   (set_attr "length" "4,2,4")
3684   (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3685)
3686
3687(define_insn "*iorsi3_compare0_scratch"
3688  [(set (reg:CC_NZ CC_REGNUM)
3689	(compare:CC_NZ
3690	 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3691		 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3692	 (const_int 0)))
3693   (clobber (match_scratch:SI 0 "=r,l,r"))]
3694  "TARGET_32BIT"
3695  "orrs%?\\t%0, %1, %2"
3696  [(set_attr "conds" "set")
3697   (set_attr "arch" "*,t2,*")
3698   (set_attr "length" "4,2,4")
3699   (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3700)
3701
3702(define_expand "xorsi3"
3703  [(set (match_operand:SI         0 "s_register_operand")
3704	(xor:SI (match_operand:SI 1 "s_register_operand")
3705		(match_operand:SI 2 "reg_or_int_operand")))]
3706  "TARGET_EITHER"
3707  "if (CONST_INT_P (operands[2]))
3708    {
3709      if (TARGET_32BIT)
3710        {
3711	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3712	    operands[2] = force_reg (SImode, operands[2]);
3713	  else
3714	    {
3715	      arm_split_constant (XOR, SImode, NULL_RTX,
3716				  INTVAL (operands[2]), operands[0],
3717				  operands[1],
3718				  optimize && can_create_pseudo_p ());
3719	      DONE;
3720	    }
3721	}
3722      else /* TARGET_THUMB1 */
3723        {
3724          rtx tmp = force_reg (SImode, operands[2]);
3725	  if (rtx_equal_p (operands[0], operands[1]))
3726	    operands[2] = tmp;
3727	  else
3728	    {
3729              operands[2] = operands[1];
3730              operands[1] = tmp;
3731	    }
3732        }
3733    }"
3734)
3735
3736(define_insn_and_split "*arm_xorsi3"
3737  [(set (match_operand:SI         0 "s_register_operand" "=r,l,r,r")
3738	(xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3739		(match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3740  "TARGET_32BIT"
3741  "@
3742   eor%?\\t%0, %1, %2
3743   eor%?\\t%0, %1, %2
3744   eor%?\\t%0, %1, %2
3745   #"
3746  "TARGET_32BIT
3747   && CONST_INT_P (operands[2])
3748   && !const_ok_for_arm (INTVAL (operands[2]))"
3749  [(clobber (const_int 0))]
3750{
3751  arm_split_constant (XOR, SImode, curr_insn,
3752                      INTVAL (operands[2]), operands[0], operands[1], 0);
3753  DONE;
3754}
3755  [(set_attr "length" "4,4,4,16")
3756   (set_attr "predicable" "yes")
3757   (set_attr "predicable_short_it" "no,yes,no,no")
3758   (set_attr "type"  "logic_imm,logic_reg,logic_reg,multiple")]
3759)
3760
3761(define_insn "*xorsi3_compare0"
3762  [(set (reg:CC_NZ CC_REGNUM)
3763	(compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3764				 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3765			 (const_int 0)))
3766   (set (match_operand:SI 0 "s_register_operand" "=r,r")
3767	(xor:SI (match_dup 1) (match_dup 2)))]
3768  "TARGET_32BIT"
3769  "eors%?\\t%0, %1, %2"
3770  [(set_attr "conds" "set")
3771   (set_attr "type" "logics_imm,logics_reg")]
3772)
3773
3774(define_insn "*xorsi3_compare0_scratch"
3775  [(set (reg:CC_NZ CC_REGNUM)
3776	(compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3777				 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3778			 (const_int 0)))]
3779  "TARGET_32BIT"
3780  "teq%?\\t%0, %1"
3781  [(set_attr "conds" "set")
3782   (set_attr "type" "logics_imm,logics_reg")]
3783)
3784
3785; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3786; (NOT D) we can sometimes merge the final NOT into one of the following
3787; insns.
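; In Boolean terms (~A & ~B) | C == ~((A | B) & ~C), so the ORR/BIC pair
; computing D can be followed by a single MVN, and that MVN is the candidate
; for merging into a later instruction.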
3788
3789(define_split
3790  [(set (match_operand:SI 0 "s_register_operand" "")
3791	(ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3792			(not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3793		(match_operand:SI 3 "arm_rhs_operand" "")))
3794   (clobber (match_operand:SI 4 "s_register_operand" ""))]
3795  "TARGET_32BIT"
3796  [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3797			      (not:SI (match_dup 3))))
3798   (set (match_dup 0) (not:SI (match_dup 4)))]
3799  ""
3800)
3801
3802(define_insn_and_split "*andsi_iorsi3_notsi"
3803  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3804	(and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3805			(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3806		(not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3807  "TARGET_32BIT"
3808  "#"   ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3809  "&& reload_completed"
3810  [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3811   (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3812  {
3813     /* If operands[3] is a constant make sure to fold the NOT into it
3814	to avoid creating a NOT of a CONST_INT.  */
3815    rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3816    if (CONST_INT_P (not_rtx))
3817      {
3818	operands[4] = operands[0];
3819	operands[5] = not_rtx;
3820      }
3821    else
3822      {
3823	operands[5] = operands[0];
3824	operands[4] = not_rtx;
3825      }
3826  }
3827  [(set_attr "length" "8")
3828   (set_attr "ce_count" "2")
3829   (set_attr "predicable" "yes")
3830   (set_attr "type" "multiple")]
3831)
3832
3833; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3834; insns are available?
3835(define_split
3836  [(set (match_operand:SI 0 "s_register_operand" "")
3837	(match_operator:SI 1 "logical_binary_operator"
3838	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3839			   (match_operand:SI 3 "const_int_operand" "")
3840			   (match_operand:SI 4 "const_int_operand" ""))
3841	  (match_operator:SI 9 "logical_binary_operator"
3842	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3843			 (match_operand:SI 6 "const_int_operand" ""))
3844	    (match_operand:SI 7 "s_register_operand" "")])]))
3845   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3846  "TARGET_32BIT
3847   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3848   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3849  [(set (match_dup 8)
3850	(match_op_dup 1
3851	 [(ashift:SI (match_dup 2) (match_dup 4))
3852	  (match_dup 5)]))
3853   (set (match_dup 0)
3854	(match_op_dup 1
3855	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3856	  (match_dup 7)]))]
3857  "
3858  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3859")
3860
3861(define_split
3862  [(set (match_operand:SI 0 "s_register_operand" "")
3863	(match_operator:SI 1 "logical_binary_operator"
3864	 [(match_operator:SI 9 "logical_binary_operator"
3865	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3866			 (match_operand:SI 6 "const_int_operand" ""))
3867	    (match_operand:SI 7 "s_register_operand" "")])
3868	  (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3869			   (match_operand:SI 3 "const_int_operand" "")
3870			   (match_operand:SI 4 "const_int_operand" ""))]))
3871   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3872  "TARGET_32BIT
3873   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3874   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3875  [(set (match_dup 8)
3876	(match_op_dup 1
3877	 [(ashift:SI (match_dup 2) (match_dup 4))
3878	  (match_dup 5)]))
3879   (set (match_dup 0)
3880	(match_op_dup 1
3881	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3882	  (match_dup 7)]))]
3883  "
3884  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3885")
3886
3887(define_split
3888  [(set (match_operand:SI 0 "s_register_operand" "")
3889	(match_operator:SI 1 "logical_binary_operator"
3890	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3891			   (match_operand:SI 3 "const_int_operand" "")
3892			   (match_operand:SI 4 "const_int_operand" ""))
3893	  (match_operator:SI 9 "logical_binary_operator"
3894	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3895			 (match_operand:SI 6 "const_int_operand" ""))
3896	    (match_operand:SI 7 "s_register_operand" "")])]))
3897   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3898  "TARGET_32BIT
3899   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3900   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3901  [(set (match_dup 8)
3902	(match_op_dup 1
3903	 [(ashift:SI (match_dup 2) (match_dup 4))
3904	  (match_dup 5)]))
3905   (set (match_dup 0)
3906	(match_op_dup 1
3907	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3908	  (match_dup 7)]))]
3909  "
3910  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3911")
3912
3913(define_split
3914  [(set (match_operand:SI 0 "s_register_operand" "")
3915	(match_operator:SI 1 "logical_binary_operator"
3916	 [(match_operator:SI 9 "logical_binary_operator"
3917	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3918			 (match_operand:SI 6 "const_int_operand" ""))
3919	    (match_operand:SI 7 "s_register_operand" "")])
3920	  (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3921			   (match_operand:SI 3 "const_int_operand" "")
3922			   (match_operand:SI 4 "const_int_operand" ""))]))
3923   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3924  "TARGET_32BIT
3925   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3926   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3927  [(set (match_dup 8)
3928	(match_op_dup 1
3929	 [(ashift:SI (match_dup 2) (match_dup 4))
3930	  (match_dup 5)]))
3931   (set (match_dup 0)
3932	(match_op_dup 1
3933	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3934	  (match_dup 7)]))]
3935  "
3936  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3937")
3938
3939
3940;; Minimum and maximum insns
3941
3942(define_expand "smaxsi3"
3943  [(parallel [
3944    (set (match_operand:SI 0 "s_register_operand")
3945	 (smax:SI (match_operand:SI 1 "s_register_operand")
3946		  (match_operand:SI 2 "arm_rhs_operand")))
3947    (clobber (reg:CC CC_REGNUM))])]
3948  "TARGET_32BIT"
3949  "
3950  if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3951    {
3952      /* No need for a clobber of the condition code register here.  */
3953      emit_insn (gen_rtx_SET (operands[0],
3954			      gen_rtx_SMAX (SImode, operands[1],
3955					    operands[2])));
3956      DONE;
3957    }
3958")
3959
3960(define_insn "*smax_0"
3961  [(set (match_operand:SI 0 "s_register_operand" "=r")
3962	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
3963		 (const_int 0)))]
3964  "TARGET_32BIT"
3965  "bic%?\\t%0, %1, %1, asr #31"
3966  [(set_attr "predicable" "yes")
3967   (set_attr "type" "logic_shift_reg")]
3968)
3969
3970(define_insn "*smax_m1"
3971  [(set (match_operand:SI 0 "s_register_operand" "=r")
3972	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
3973		 (const_int -1)))]
3974  "TARGET_32BIT"
3975  "orr%?\\t%0, %1, %1, asr #31"
3976  [(set_attr "predicable" "yes")
3977   (set_attr "type" "logic_shift_reg")]
3978)
3979
3980(define_insn_and_split "*arm_smax_insn"
3981  [(set (match_operand:SI          0 "s_register_operand" "=r,r")
3982	(smax:SI (match_operand:SI 1 "s_register_operand"  "%0,?r")
3983		 (match_operand:SI 2 "arm_rhs_operand"    "rI,rI")))
3984   (clobber (reg:CC CC_REGNUM))]
3985  "TARGET_ARM"
3986  "#"
3987   ; cmp\\t%1, %2\;movlt\\t%0, %2
   ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2
3989  "TARGET_ARM"
3990  [(set (reg:CC CC_REGNUM)
3991        (compare:CC (match_dup 1) (match_dup 2)))
3992   (set (match_dup 0)
3993        (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3994                         (match_dup 1)
3995                         (match_dup 2)))]
3996  ""
3997  [(set_attr "conds" "clob")
3998   (set_attr "length" "8,12")
3999   (set_attr "type" "multiple")]
4000)
4001
4002(define_expand "sminsi3"
4003  [(parallel [
4004    (set (match_operand:SI 0 "s_register_operand")
4005	 (smin:SI (match_operand:SI 1 "s_register_operand")
4006		  (match_operand:SI 2 "arm_rhs_operand")))
4007    (clobber (reg:CC CC_REGNUM))])]
4008  "TARGET_32BIT"
4009  "
4010  if (operands[2] == const0_rtx)
4011    {
4012      /* No need for a clobber of the condition code register here.  */
4013      emit_insn (gen_rtx_SET (operands[0],
4014			      gen_rtx_SMIN (SImode, operands[1],
4015					    operands[2])));
4016      DONE;
4017    }
4018")
4019
4020(define_insn "*smin_0"
4021  [(set (match_operand:SI 0 "s_register_operand" "=r")
4022	(smin:SI (match_operand:SI 1 "s_register_operand" "r")
4023		 (const_int 0)))]
4024  "TARGET_32BIT"
4025  "and%?\\t%0, %1, %1, asr #31"
4026  [(set_attr "predicable" "yes")
4027   (set_attr "type" "logic_shift_reg")]
4028)
4029
4030(define_insn_and_split "*arm_smin_insn"
4031  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4032	(smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4033		 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4034   (clobber (reg:CC CC_REGNUM))]
4035  "TARGET_ARM"
4036  "#"
4037    ; cmp\\t%1, %2\;movge\\t%0, %2
    ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2
4039  "TARGET_ARM"
4040  [(set (reg:CC CC_REGNUM)
4041        (compare:CC (match_dup 1) (match_dup 2)))
4042   (set (match_dup 0)
4043        (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4044                         (match_dup 1)
4045                         (match_dup 2)))]
4046  ""
4047  [(set_attr "conds" "clob")
4048   (set_attr "length" "8,12")
4049   (set_attr "type" "multiple,multiple")]
4050)
4051
4052(define_expand "umaxsi3"
4053  [(parallel [
4054    (set (match_operand:SI 0 "s_register_operand")
4055	 (umax:SI (match_operand:SI 1 "s_register_operand")
4056		  (match_operand:SI 2 "arm_rhs_operand")))
4057    (clobber (reg:CC CC_REGNUM))])]
4058  "TARGET_32BIT"
4059  ""
4060)
4061
4062(define_insn_and_split "*arm_umaxsi3"
4063  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4064	(umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4065		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4066   (clobber (reg:CC CC_REGNUM))]
4067  "TARGET_ARM"
4068  "#"
4069    ; cmp\\t%1, %2\;movcc\\t%0, %2
4070    ; cmp\\t%1, %2\;movcs\\t%0, %1
    ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2
4072  "TARGET_ARM"
4073  [(set (reg:CC CC_REGNUM)
4074        (compare:CC (match_dup 1) (match_dup 2)))
4075   (set (match_dup 0)
4076        (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4077                         (match_dup 1)
4078                         (match_dup 2)))]
4079  ""
4080  [(set_attr "conds" "clob")
4081   (set_attr "length" "8,8,12")
4082   (set_attr "type" "store_4")]
4083)
4084
4085(define_expand "uminsi3"
4086  [(parallel [
4087    (set (match_operand:SI 0 "s_register_operand")
4088	 (umin:SI (match_operand:SI 1 "s_register_operand")
4089		  (match_operand:SI 2 "arm_rhs_operand")))
4090    (clobber (reg:CC CC_REGNUM))])]
4091  "TARGET_32BIT"
4092  ""
4093)
4094
4095(define_insn_and_split "*arm_uminsi3"
4096  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4097	(umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4098		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4099   (clobber (reg:CC CC_REGNUM))]
4100  "TARGET_ARM"
4101  "#"
4102   ; cmp\\t%1, %2\;movcs\\t%0, %2
4103   ; cmp\\t%1, %2\;movcc\\t%0, %1
   ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2
4105  "TARGET_ARM"
4106  [(set (reg:CC CC_REGNUM)
4107        (compare:CC (match_dup 1) (match_dup 2)))
4108   (set (match_dup 0)
4109        (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4110                         (match_dup 1)
4111                         (match_dup 2)))]
4112  ""
4113  [(set_attr "conds" "clob")
4114   (set_attr "length" "8,8,12")
4115   (set_attr "type" "store_4")]
4116)
4117
4118(define_insn "*store_minmaxsi"
4119  [(set (match_operand:SI 0 "memory_operand" "=m")
4120	(match_operator:SI 3 "minmax_operator"
4121	 [(match_operand:SI 1 "s_register_operand" "r")
4122	  (match_operand:SI 2 "s_register_operand" "r")]))
4123   (clobber (reg:CC CC_REGNUM))]
4124  "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4125  "*
4126  operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4127				operands[1], operands[2]);
4128  output_asm_insn (\"cmp\\t%1, %2\", operands);
4129  if (TARGET_THUMB2)
4130    output_asm_insn (\"ite\t%d3\", operands);
4131  output_asm_insn (\"str%d3\\t%1, %0\", operands);
4132  output_asm_insn (\"str%D3\\t%2, %0\", operands);
4133  return \"\";
4134  "
4135  [(set_attr "conds" "clob")
4136   (set (attr "length")
4137	(if_then_else (eq_attr "is_thumb" "yes")
4138		      (const_int 14)
4139		      (const_int 12)))
4140   (set_attr "type" "store_4")]
4141)
4142
4143; Reject the frame pointer in operand[1], since reloading this after
4144; it has been eliminated can cause carnage.
4145(define_insn "*minmax_arithsi"
4146  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4147	(match_operator:SI 4 "shiftable_operator"
4148	 [(match_operator:SI 5 "minmax_operator"
4149	   [(match_operand:SI 2 "s_register_operand" "r,r")
4150	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4151	  (match_operand:SI 1 "s_register_operand" "0,?r")]))
4152   (clobber (reg:CC CC_REGNUM))]
4153  "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4154  "*
4155  {
4156    enum rtx_code code = GET_CODE (operands[4]);
4157    bool need_else;
4158
4159    if (which_alternative != 0 || operands[3] != const0_rtx
4160        || (code != PLUS && code != IOR && code != XOR))
4161      need_else = true;
4162    else
4163      need_else = false;
4164
4165    operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4166				  operands[2], operands[3]);
4167    output_asm_insn (\"cmp\\t%2, %3\", operands);
4168    if (TARGET_THUMB2)
4169      {
4170	if (need_else)
4171	  output_asm_insn (\"ite\\t%d5\", operands);
4172	else
4173	  output_asm_insn (\"it\\t%d5\", operands);
4174      }
4175    output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4176    if (need_else)
4177      output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4178    return \"\";
4179  }"
4180  [(set_attr "conds" "clob")
4181   (set (attr "length")
4182	(if_then_else (eq_attr "is_thumb" "yes")
4183		      (const_int 14)
4184		      (const_int 12)))
4185   (set_attr "type" "multiple")]
4186)
4187
4188; Reject the frame pointer in operand[1], since reloading this after
4189; it has been eliminated can cause carnage.
4190(define_insn_and_split "*minmax_arithsi_non_canon"
4191  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4192	(minus:SI
4193	 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4194	  (match_operator:SI 4 "minmax_operator"
4195	   [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4196	    (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4197   (clobber (reg:CC CC_REGNUM))]
4198  "TARGET_32BIT && !arm_eliminable_register (operands[1])
4199   && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4200  "#"
4201  "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4202  [(set (reg:CC CC_REGNUM)
4203        (compare:CC (match_dup 2) (match_dup 3)))
4204
4205   (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4206              (set (match_dup 0)
4207                   (minus:SI (match_dup 1)
4208                             (match_dup 2))))
4209   (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4210              (set (match_dup 0)
4211                   (match_dup 6)))]
4212  {
4213  machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4214                                           operands[2], operands[3]);
4215  enum rtx_code rc = minmax_code (operands[4]);
4216  operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4217                                operands[2], operands[3]);
4218
4219  if (mode == CCFPmode || mode == CCFPEmode)
4220    rc = reverse_condition_maybe_unordered (rc);
4221  else
4222    rc = reverse_condition (rc);
4223  operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4224  if (CONST_INT_P (operands[3]))
4225    operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4226  else
4227    operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4228  }
4229  [(set_attr "conds" "clob")
4230   (set (attr "length")
4231	(if_then_else (eq_attr "is_thumb" "yes")
4232		      (const_int 14)
4233		      (const_int 12)))
4234   (set_attr "type" "multiple")]
4235)
4236
4237
4238(define_expand "arm_<ss_op>"
4239  [(set (match_operand:SI 0 "s_register_operand")
4240	(SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4241			(match_operand:SI 2 "s_register_operand")))]
4242  "TARGET_DSP_MULTIPLY"
4243  {
4244    if (ARM_Q_BIT_READ)
4245      emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4246					    operands[1], operands[2]));
4247    else
4248      emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4249    DONE;
4250  }
4251)
4252
4253(define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4254  [(set (match_operand:SI 0 "s_register_operand" "=r")
4255	(SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4256			(match_operand:SI 2 "s_register_operand" "r")))]
4257  "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4258  "<ss_op>%?\t%0, %1, %2"
4259  [(set_attr "predicable" "yes")
4260   (set_attr "type" "alu_dsp_reg")]
4261)
4262
4263(define_code_iterator SAT [smin smax])
4264(define_code_attr SATrev [(smin "smax") (smax "smin")])
4265(define_code_attr SATlo [(smin "1") (smax "2")])
4266(define_code_attr SAThi [(smin "2") (smax "1")])
4267
4268(define_expand "arm_ssat"
4269  [(match_operand:SI 0 "s_register_operand")
4270   (match_operand:SI 1 "s_register_operand")
4271   (match_operand:SI 2 "const_int_operand")]
4272  "TARGET_32BIT && arm_arch6"
4273  {
4274    HOST_WIDE_INT val = INTVAL (operands[2]);
4275    /* The builtin checking code should have ensured the right
4276       range for the immediate.  */
4277    gcc_assert (IN_RANGE (val, 1, 32));
4278    HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4279    HOST_WIDE_INT lower_bound = -upper_bound - 1;
4280    rtx up_rtx = gen_int_mode (upper_bound, SImode);
4281    rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4282    if (ARM_Q_BIT_READ)
4283      emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4284				      up_rtx, operands[1]));
4285    else
4286      emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4287    DONE;
4288  }
4289)
4290
4291(define_expand "arm_usat"
4292  [(match_operand:SI 0 "s_register_operand")
4293   (match_operand:SI 1 "s_register_operand")
4294   (match_operand:SI 2 "const_int_operand")]
4295  "TARGET_32BIT && arm_arch6"
4296  {
4297    HOST_WIDE_INT val = INTVAL (operands[2]);
4298    /* The builtin checking code should have ensured the right
4299       range for the immediate.  */
4300    gcc_assert (IN_RANGE (val, 0, 31));
4301    HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4302    rtx up_rtx = gen_int_mode (upper_bound, SImode);
4303    rtx lo_rtx = CONST0_RTX (SImode);
4304    if (ARM_Q_BIT_READ)
4305      emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4306				      operands[1]));
4307    else
4308      emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4309    DONE;
4310  }
4311)
4312
4313(define_insn "arm_get_apsr"
4314  [(set (match_operand:SI 0 "s_register_operand" "=r")
4315	(unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4316  "TARGET_ARM_QBIT"
4317  "mrs%?\t%0, APSR"
4318  [(set_attr "predicable" "yes")
4319   (set_attr "conds" "use")]
4320)
4321
4322(define_insn "arm_set_apsr"
4323  [(set (reg:CC APSRQ_REGNUM)
4324	(unspec_volatile:CC
4325	  [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4326  "TARGET_ARM_QBIT"
4327  "msr%?\tAPSR_nzcvq, %0"
4328  [(set_attr "predicable" "yes")
4329   (set_attr "conds" "set")]
4330)
4331
4332;; Read the APSR and extract the Q bit (bit 27)
4333(define_expand "arm_saturation_occurred"
4334  [(match_operand:SI 0 "s_register_operand")]
4335  "TARGET_ARM_QBIT"
4336  {
4337    rtx apsr = gen_reg_rtx (SImode);
4338    emit_insn (gen_arm_get_apsr (apsr));
4339    emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4340	       gen_int_mode (27, SImode)));
4341    DONE;
4342  }
4343)
4344
4345;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4346(define_expand "arm_set_saturation"
4347  [(match_operand:SI 0 "reg_or_int_operand")]
4348  "TARGET_ARM_QBIT"
4349  {
4350    rtx apsr = gen_reg_rtx (SImode);
4351    emit_insn (gen_arm_get_apsr (apsr));
4352    rtx to_insert = gen_reg_rtx (SImode);
4353    if (CONST_INT_P (operands[0]))
4354      emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4355				 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4356    else
4357      {
4358        rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4359        emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4360				  CONST0_RTX (SImode)));
4361      }
4362    emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4363	       gen_int_mode (27, SImode), to_insert));
4364    emit_insn (gen_arm_set_apsr (apsr));
4365    DONE;
4366  }
4367)
4368
4369(define_insn "satsi_<SAT:code><add_clobber_q_name>"
4370  [(set (match_operand:SI 0 "s_register_operand" "=r")
4371        (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4372                           (match_operand:SI 1 "const_int_operand" "i"))
4373                (match_operand:SI 2 "const_int_operand" "i")))]
4374  "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4375   && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4376{
4377  int mask;
4378  bool signed_sat;
4379  if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4380                               &mask, &signed_sat))
4381    gcc_unreachable ();
4382
4383  operands[1] = GEN_INT (mask);
4384  if (signed_sat)
4385    return "ssat%?\t%0, %1, %3";
4386  else
4387    return "usat%?\t%0, %1, %3";
4388}
4389  [(set_attr "predicable" "yes")
4390   (set_attr "type" "alus_imm")]
4391)
4392
4393(define_insn "*satsi_<SAT:code>_shift"
4394  [(set (match_operand:SI 0 "s_register_operand" "=r")
4395        (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4396                             [(match_operand:SI 4 "s_register_operand" "r")
4397                              (match_operand:SI 5 "const_int_operand" "i")])
4398                           (match_operand:SI 1 "const_int_operand" "i"))
4399                (match_operand:SI 2 "const_int_operand" "i")))]
4400  "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4401   && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4402{
4403  int mask;
4404  bool signed_sat;
4405  if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4406                               &mask, &signed_sat))
4407    gcc_unreachable ();
4408
4409  operands[1] = GEN_INT (mask);
4410  if (signed_sat)
4411    return "ssat%?\t%0, %1, %4%S3";
4412  else
4413    return "usat%?\t%0, %1, %4%S3";
4414}
4415  [(set_attr "predicable" "yes")
4416   (set_attr "shift" "3")
4417   (set_attr "type" "logic_shift_reg")])
4418
4419;; Custom Datapath Extension insns.
4420(define_insn "arm_cx1<mode>"
4421   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4422	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4423	               (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4424	    UNSPEC_CDE))]
4425   "TARGET_CDE"
4426   "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4427  [(set_attr "type" "coproc")]
4428)
4429
4430(define_insn "arm_cx1a<mode>"
4431   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4432	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4433		       (match_operand:SIDI 2 "s_register_operand" "0")
4434	               (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4435	    UNSPEC_CDEA))]
4436   "TARGET_CDE"
4437   "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4438  [(set_attr "type" "coproc")]
4439)
4440
4441(define_insn "arm_cx2<mode>"
4442   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4443	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4444		       (match_operand:SI 2 "s_register_operand" "r")
4445	               (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4446	    UNSPEC_CDE))]
4447   "TARGET_CDE"
4448   "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4449  [(set_attr "type" "coproc")]
4450)
4451
4452(define_insn "arm_cx2a<mode>"
4453   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4454	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4455		       (match_operand:SIDI 2 "s_register_operand" "0")
4456		       (match_operand:SI 3 "s_register_operand" "r")
4457	               (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4458	    UNSPEC_CDEA))]
4459   "TARGET_CDE"
4460   "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4461  [(set_attr "type" "coproc")]
4462)
4463
4464(define_insn "arm_cx3<mode>"
4465   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4466	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4467		       (match_operand:SI 2 "s_register_operand" "r")
4468		       (match_operand:SI 3 "s_register_operand" "r")
4469	               (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4470	    UNSPEC_CDE))]
4471   "TARGET_CDE"
4472   "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4473  [(set_attr "type" "coproc")]
4474)
4475
4476(define_insn "arm_cx3a<mode>"
4477   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4478	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4479		       (match_operand:SIDI 2 "s_register_operand" "0")
4480		       (match_operand:SI 3 "s_register_operand" "r")
4481		       (match_operand:SI 4 "s_register_operand" "r")
4482                       (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4483	    UNSPEC_CDEA))]
4484   "TARGET_CDE"
4485   "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4486  [(set_attr "type" "coproc")]
4487)
4488
4489;; Shift and rotation insns
4490
4491(define_expand "ashldi3"
4492  [(set (match_operand:DI            0 "s_register_operand")
4493        (ashift:DI (match_operand:DI 1 "s_register_operand")
4494                   (match_operand:SI 2 "reg_or_int_operand")))]
4495  "TARGET_32BIT"
4496  "
4497  if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4498    {
4499      if (!reg_or_int_operand (operands[2], SImode))
4500        operands[2] = force_reg (SImode, operands[2]);
4501
4502      /* Armv8.1-M Mainline double shifts are not expanded.  */
4503      if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4504	  && (REG_P (operands[2]) || INTVAL(operands[2]) != 32))
4505        {
	  if (!reg_overlap_mentioned_p (operands[0], operands[1]))
4507	    emit_insn (gen_movdi (operands[0], operands[1]));
4508
4509	  emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4510	  DONE;
4511	}
4512    }
4513
4514  arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4515				 operands[2], gen_reg_rtx (SImode),
4516				 gen_reg_rtx (SImode));
4517  DONE;
4518")
4519
4520(define_expand "ashlsi3"
4521  [(set (match_operand:SI            0 "s_register_operand")
4522	(ashift:SI (match_operand:SI 1 "s_register_operand")
4523		   (match_operand:SI 2 "arm_rhs_operand")))]
4524  "TARGET_EITHER"
4525  "
4526  if (CONST_INT_P (operands[2])
4527      && (UINTVAL (operands[2])) > 31)
4528    {
4529      emit_insn (gen_movsi (operands[0], const0_rtx));
4530      DONE;
4531    }
4532  "
4533)
4534
4535(define_expand "ashrdi3"
4536  [(set (match_operand:DI              0 "s_register_operand")
4537        (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4538                     (match_operand:SI 2 "reg_or_int_operand")))]
4539  "TARGET_32BIT"
4540  "
4541  /* Armv8.1-M Mainline double shifts are not expanded.  */
4542  if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4543      && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4544    {
      if (!reg_overlap_mentioned_p (operands[0], operands[1]))
4546	emit_insn (gen_movdi (operands[0], operands[1]));
4547
4548      emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4549      DONE;
4550    }
4551
4552  arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4553				 operands[2], gen_reg_rtx (SImode),
4554				 gen_reg_rtx (SImode));
4555  DONE;
4556")
4557
4558(define_expand "ashrsi3"
4559  [(set (match_operand:SI              0 "s_register_operand")
4560	(ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4561		     (match_operand:SI 2 "arm_rhs_operand")))]
4562  "TARGET_EITHER"
4563  "
4564  if (CONST_INT_P (operands[2])
4565      && UINTVAL (operands[2]) > 31)
4566    operands[2] = GEN_INT (31);
4567  "
4568)
4569
4570(define_expand "lshrdi3"
4571  [(set (match_operand:DI              0 "s_register_operand")
4572        (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4573                     (match_operand:SI 2 "reg_or_int_operand")))]
4574  "TARGET_32BIT"
4575  "
4576  /* Armv8.1-M Mainline double shifts are not expanded.  */
4577  if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4578    && long_shift_imm (operands[2], GET_MODE (operands[2])))
4579    {
      if (!reg_overlap_mentioned_p (operands[0], operands[1]))
4581        emit_insn (gen_movdi (operands[0], operands[1]));
4582
4583      emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4584      DONE;
4585    }
4586
4587  arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4588				 operands[2], gen_reg_rtx (SImode),
4589				 gen_reg_rtx (SImode));
4590  DONE;
4591")
4592
4593(define_expand "lshrsi3"
4594  [(set (match_operand:SI              0 "s_register_operand")
4595	(lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4596		     (match_operand:SI 2 "arm_rhs_operand")))]
4597  "TARGET_EITHER"
4598  "
4599  if (CONST_INT_P (operands[2])
4600      && (UINTVAL (operands[2])) > 31)
4601    {
4602      emit_insn (gen_movsi (operands[0], const0_rtx));
4603      DONE;
4604    }
4605  "
4606)
4607
4608(define_expand "rotlsi3"
4609  [(set (match_operand:SI              0 "s_register_operand")
4610	(rotatert:SI (match_operand:SI 1 "s_register_operand")
4611		     (match_operand:SI 2 "reg_or_int_operand")))]
4612  "TARGET_32BIT"
4613  "
4614  if (CONST_INT_P (operands[2]))
4615    operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4616  else
4617    {
4618      rtx reg = gen_reg_rtx (SImode);
4619      emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4620      operands[2] = reg;
4621    }
4622  "
4623)
4624
4625(define_expand "rotrsi3"
4626  [(set (match_operand:SI              0 "s_register_operand")
4627	(rotatert:SI (match_operand:SI 1 "s_register_operand")
4628		     (match_operand:SI 2 "arm_rhs_operand")))]
4629  "TARGET_EITHER"
4630  "
4631  if (TARGET_32BIT)
4632    {
4633      if (CONST_INT_P (operands[2])
4634          && UINTVAL (operands[2]) > 31)
4635        operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4636    }
4637  else /* TARGET_THUMB1 */
4638    {
4639      if (CONST_INT_P (operands [2]))
4640        operands [2] = force_reg (SImode, operands[2]);
4641    }
4642  "
4643)
4644
4645(define_insn "*arm_shiftsi3"
4646  [(set (match_operand:SI   0 "s_register_operand" "=l,l,r,r")
4647	(match_operator:SI  3 "shift_operator"
4648	 [(match_operand:SI 1 "s_register_operand"  "0,l,r,r")
4649	  (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4650  "TARGET_32BIT"
4651  "* return arm_output_shift(operands, 0);"
4652  [(set_attr "predicable" "yes")
4653   (set_attr "arch" "t2,t2,*,*")
4654   (set_attr "predicable_short_it" "yes,yes,no,no")
4655   (set_attr "length" "4")
4656   (set_attr "shift" "1")
4657   (set_attr "autodetect_type" "alu_shift_operator3")]
4658)
4659
4660(define_insn "*shiftsi3_compare0"
4661  [(set (reg:CC_NZ CC_REGNUM)
4662	(compare:CC_NZ (match_operator:SI 3 "shift_operator"
4663			  [(match_operand:SI 1 "s_register_operand" "r,r")
4664			   (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4665			 (const_int 0)))
4666   (set (match_operand:SI 0 "s_register_operand" "=r,r")
4667	(match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4668  "TARGET_32BIT"
4669  "* return arm_output_shift(operands, 1);"
4670  [(set_attr "conds" "set")
4671   (set_attr "shift" "1")
4672   (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4673)
4674
4675(define_insn "*shiftsi3_compare0_scratch"
4676  [(set (reg:CC_NZ CC_REGNUM)
4677	(compare:CC_NZ (match_operator:SI 3 "shift_operator"
4678			  [(match_operand:SI 1 "s_register_operand" "r,r")
4679			   (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4680			 (const_int 0)))
4681   (clobber (match_scratch:SI 0 "=r,r"))]
4682  "TARGET_32BIT"
4683  "* return arm_output_shift(operands, 1);"
4684  [(set_attr "conds" "set")
4685   (set_attr "shift" "1")
4686   (set_attr "type" "shift_imm,shift_reg")]
4687)
4688
4689(define_insn "*not_shiftsi"
4690  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4691	(not:SI (match_operator:SI 3 "shift_operator"
4692		 [(match_operand:SI 1 "s_register_operand" "r,r")
4693		  (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4694  "TARGET_32BIT"
4695  "mvn%?\\t%0, %1%S3"
4696  [(set_attr "predicable" "yes")
4697   (set_attr "shift" "1")
4698   (set_attr "arch" "32,a")
4699   (set_attr "type" "mvn_shift,mvn_shift_reg")])
4700
4701(define_insn "*not_shiftsi_compare0"
4702  [(set (reg:CC_NZ CC_REGNUM)
4703	(compare:CC_NZ
4704	 (not:SI (match_operator:SI 3 "shift_operator"
4705		  [(match_operand:SI 1 "s_register_operand" "r,r")
4706		   (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4707	 (const_int 0)))
4708   (set (match_operand:SI 0 "s_register_operand" "=r,r")
4709	(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4710  "TARGET_32BIT"
4711  "mvns%?\\t%0, %1%S3"
4712  [(set_attr "conds" "set")
4713   (set_attr "shift" "1")
4714   (set_attr "arch" "32,a")
4715   (set_attr "type" "mvn_shift,mvn_shift_reg")])
4716
4717(define_insn "*not_shiftsi_compare0_scratch"
4718  [(set (reg:CC_NZ CC_REGNUM)
4719	(compare:CC_NZ
4720	 (not:SI (match_operator:SI 3 "shift_operator"
4721		  [(match_operand:SI 1 "s_register_operand" "r,r")
4722		   (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4723	 (const_int 0)))
4724   (clobber (match_scratch:SI 0 "=r,r"))]
4725  "TARGET_32BIT"
4726  "mvns%?\\t%0, %1%S3"
4727  [(set_attr "conds" "set")
4728   (set_attr "shift" "1")
4729   (set_attr "arch" "32,a")
4730   (set_attr "type" "mvn_shift,mvn_shift_reg")])
4731
4732;; We don't really have extzv, but defining this using shifts helps
4733;; to reduce register pressure later on.
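;;
;; As a hedged illustration of the shift-pair form used by the expander
;; below (operand 2 is the field width, operand 3 the bit position,
;; counted from the least-significant bit), a minimal C sketch:
;;
;;   /* Illustrative only; assumes 1 <= width and bitpos + width <= 32.  */
;;   unsigned int extzv_via_shifts (unsigned int x, int width, int bitpos)
;;   {
;;     /* The left shift discards the bits above the field; the logical
;;        right shift then drops the bits below it.  */
;;     return (x << (32 - bitpos - width)) >> (32 - width);
;;   }
;;
;; which is exactly the lshift/rshift pair computed below.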
4734
4735(define_expand "extzv"
4736  [(set (match_operand 0 "s_register_operand")
4737	(zero_extract (match_operand 1 "nonimmediate_operand")
4738		      (match_operand 2 "const_int_operand")
4739		      (match_operand 3 "const_int_operand")))]
4740  "TARGET_THUMB1 || arm_arch_thumb2"
4741  "
4742  {
4743    HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4744    HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4745
4746    if (arm_arch_thumb2)
4747      {
4748	HOST_WIDE_INT width = INTVAL (operands[2]);
4749	HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4750
4751	if (unaligned_access && MEM_P (operands[1])
4752	    && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4753	  {
4754	    rtx base_addr;
4755
4756	    if (BYTES_BIG_ENDIAN)
4757	      bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4758		       - bitpos;
4759
4760	    if (width == 32)
4761              {
4762		base_addr = adjust_address (operands[1], SImode,
4763					    bitpos / BITS_PER_UNIT);
4764		emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4765              }
4766	    else
4767              {
4768		rtx dest = operands[0];
4769		rtx tmp = gen_reg_rtx (SImode);
4770
4771		/* We may get a paradoxical subreg here.  Strip it off.  */
4772		if (GET_CODE (dest) == SUBREG
4773		    && GET_MODE (dest) == SImode
4774		    && GET_MODE (SUBREG_REG (dest)) == HImode)
4775		  dest = SUBREG_REG (dest);
4776
4777		if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4778		  FAIL;
4779
4780		base_addr = adjust_address (operands[1], HImode,
4781					    bitpos / BITS_PER_UNIT);
4782		emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4783		emit_move_insn (gen_lowpart (SImode, dest), tmp);
4784	      }
4785	    DONE;
4786	  }
4787	else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4788	  {
4789	    emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4790				     operands[3]));
4791	    DONE;
4792	  }
4793	else
4794	  FAIL;
4795      }
4796
4797    if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4798      FAIL;
4799
4800    operands[3] = GEN_INT (rshift);
4801
4802    if (lshift == 0)
4803      {
4804        emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4805        DONE;
4806      }
4807
4808    emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4809			     operands[3], gen_reg_rtx (SImode)));
4810    DONE;
4811  }"
4812)
4813
4814;; Helper for extzv, for the Thumb-1 register-shifts case.
4815
4816(define_expand "extzv_t1"
4817  [(set (match_operand:SI 4 "s_register_operand")
4818	(ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4819		   (match_operand:SI 2 "const_int_operand")))
4820   (set (match_operand:SI 0 "s_register_operand")
4821	(lshiftrt:SI (match_dup 4)
4822		     (match_operand:SI 3 "const_int_operand")))]
4823  "TARGET_THUMB1"
4824  "")
4825
4826(define_expand "extv"
4827  [(set (match_operand 0 "s_register_operand")
4828	(sign_extract (match_operand 1 "nonimmediate_operand")
4829		      (match_operand 2 "const_int_operand")
4830		      (match_operand 3 "const_int_operand")))]
4831  "arm_arch_thumb2"
4832{
4833  HOST_WIDE_INT width = INTVAL (operands[2]);
4834  HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4835
4836  if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
      && (bitpos % BITS_PER_UNIT) == 0)
4838    {
4839      rtx base_addr;
4840
4841      if (BYTES_BIG_ENDIAN)
4842	bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4843
4844      if (width == 32)
4845        {
4846	  base_addr = adjust_address (operands[1], SImode,
4847				      bitpos / BITS_PER_UNIT);
4848	  emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4849        }
4850      else
4851        {
4852	  rtx dest = operands[0];
4853	  rtx tmp = gen_reg_rtx (SImode);
4854
4855	  /* We may get a paradoxical subreg here.  Strip it off.  */
4856	  if (GET_CODE (dest) == SUBREG
4857	      && GET_MODE (dest) == SImode
4858	      && GET_MODE (SUBREG_REG (dest)) == HImode)
4859	    dest = SUBREG_REG (dest);
4860
4861	  if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4862	    FAIL;
4863
4864	  base_addr = adjust_address (operands[1], HImode,
4865				      bitpos / BITS_PER_UNIT);
4866	  emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4867	  emit_move_insn (gen_lowpart (SImode, dest), tmp);
4868	}
4869
4870      DONE;
4871    }
4872  else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4873    FAIL;
4874  else if (GET_MODE (operands[0]) == SImode
4875	   && GET_MODE (operands[1]) == SImode)
4876    {
4877      emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4878				 operands[3]));
4879      DONE;
4880    }
4881
4882  FAIL;
4883})
4884
4885; Helper to expand register forms of extv with the proper modes.
4886
4887(define_expand "extv_regsi"
4888  [(set (match_operand:SI 0 "s_register_operand")
4889	(sign_extract:SI (match_operand:SI 1 "s_register_operand")
4890			 (match_operand 2 "const_int_operand")
4891			 (match_operand 3 "const_int_operand")))]
4892  ""
4893{
4894})
4895
4896; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
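;
; As a hedged example of a typical trigger (not the only one), reading a
; misaligned field of a packed structure, e.g.
;
;   struct __attribute__((packed)) msg { char tag; int payload; };
;   int get_payload (struct msg *m) { return m->payload; }
;
; can be expanded to a single unaligned_loadsi when unaligned_access is
; enabled, instead of being assembled byte by byte.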
4897
4898(define_insn "unaligned_loaddi"
4899  [(set (match_operand:DI 0 "s_register_operand" "=r")
4900	(unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4901		   UNSPEC_UNALIGNED_LOAD))]
4902  "TARGET_32BIT && TARGET_LDRD"
4903  "*
4904  return output_move_double (operands, true, NULL);
4905  "
4906  [(set_attr "length" "8")
4907   (set_attr "type" "load_8")])
4908
4909(define_insn "unaligned_loadsi"
4910  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4911	(unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4912		   UNSPEC_UNALIGNED_LOAD))]
4913  "unaligned_access"
4914  "@
4915   ldr\t%0, %1\t@ unaligned
4916   ldr%?\t%0, %1\t@ unaligned
4917   ldr%?\t%0, %1\t@ unaligned"
4918  [(set_attr "arch" "t1,t2,32")
4919   (set_attr "length" "2,2,4")
4920   (set_attr "predicable" "no,yes,yes")
4921   (set_attr "predicable_short_it" "no,yes,no")
4922   (set_attr "type" "load_4")])
4923
4924;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4925;; address (there's no immediate format).  That's tricky to support
4926;; here and we don't really need this pattern for that case, so only
4927;; enable for 32-bit ISAs.
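;; (For reference, the only 16-bit Thumb-1 encoding is the register-offset
;; form, e.g. "ldrsh r0, [r1, r2]"; unlike ldrh, there is no
;; "ldrsh r0, [r1, #imm]" encoding.)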
4928(define_insn "unaligned_loadhis"
4929  [(set (match_operand:SI 0 "s_register_operand" "=r")
4930	(sign_extend:SI
4931	  (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4932		     UNSPEC_UNALIGNED_LOAD)))]
4933  "unaligned_access && TARGET_32BIT"
4934  "ldrsh%?\t%0, %1\t@ unaligned"
4935  [(set_attr "predicable" "yes")
4936   (set_attr "type" "load_byte")])
4937
4938(define_insn "unaligned_loadhiu"
4939  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4940	(zero_extend:SI
4941	  (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4942		     UNSPEC_UNALIGNED_LOAD)))]
4943  "unaligned_access"
4944  "@
4945   ldrh\t%0, %1\t@ unaligned
4946   ldrh%?\t%0, %1\t@ unaligned
4947   ldrh%?\t%0, %1\t@ unaligned"
4948  [(set_attr "arch" "t1,t2,32")
4949   (set_attr "length" "2,2,4")
4950   (set_attr "predicable" "no,yes,yes")
4951   (set_attr "predicable_short_it" "no,yes,no")
4952   (set_attr "type" "load_byte")])
4953
4954(define_insn "unaligned_storedi"
4955  [(set (match_operand:DI 0 "memory_operand" "=m")
4956	(unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4957		   UNSPEC_UNALIGNED_STORE))]
4958  "TARGET_32BIT && TARGET_LDRD"
4959  "*
4960  return output_move_double (operands, true, NULL);
4961  "
4962  [(set_attr "length" "8")
4963   (set_attr "type" "store_8")])
4964
4965(define_insn "unaligned_storesi"
4966  [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4967	(unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4968		   UNSPEC_UNALIGNED_STORE))]
4969  "unaligned_access"
4970  "@
4971   str\t%1, %0\t@ unaligned
4972   str%?\t%1, %0\t@ unaligned
4973   str%?\t%1, %0\t@ unaligned"
4974  [(set_attr "arch" "t1,t2,32")
4975   (set_attr "length" "2,2,4")
4976   (set_attr "predicable" "no,yes,yes")
4977   (set_attr "predicable_short_it" "no,yes,no")
4978   (set_attr "type" "store_4")])
4979
4980(define_insn "unaligned_storehi"
4981  [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4982	(unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4983		   UNSPEC_UNALIGNED_STORE))]
4984  "unaligned_access"
4985  "@
4986   strh\t%1, %0\t@ unaligned
4987   strh%?\t%1, %0\t@ unaligned
4988   strh%?\t%1, %0\t@ unaligned"
4989  [(set_attr "arch" "t1,t2,32")
4990   (set_attr "length" "2,2,4")
4991   (set_attr "predicable" "no,yes,yes")
4992   (set_attr "predicable_short_it" "no,yes,no")
4993   (set_attr "type" "store_4")])
4994
4995
4996(define_insn "*extv_reg"
4997  [(set (match_operand:SI 0 "s_register_operand" "=r")
4998	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4999			  (match_operand:SI 2 "const_int_operand" "n")
5000			  (match_operand:SI 3 "const_int_operand" "n")))]
5001  "arm_arch_thumb2
5002   && IN_RANGE (INTVAL (operands[3]), 0, 31)
5003   && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5004  "sbfx%?\t%0, %1, %3, %2"
5005  [(set_attr "length" "4")
5006   (set_attr "predicable" "yes")
5007   (set_attr "type" "bfm")]
5008)
5009
5010(define_insn "extzv_t2"
5011  [(set (match_operand:SI 0 "s_register_operand" "=r")
5012	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5013			  (match_operand:SI 2 "const_int_operand" "n")
5014			  (match_operand:SI 3 "const_int_operand" "n")))]
5015  "arm_arch_thumb2
5016   && IN_RANGE (INTVAL (operands[3]), 0, 31)
5017   && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5018  "ubfx%?\t%0, %1, %3, %2"
5019  [(set_attr "length" "4")
5020   (set_attr "predicable" "yes")
5021   (set_attr "type" "bfm")]
5022)
5023
5024
5025;; Division instructions
5026(define_insn "divsi3"
5027  [(set (match_operand:SI	  0 "s_register_operand" "=r,r")
5028	(div:SI (match_operand:SI 1 "s_register_operand"  "r,r")
5029		(match_operand:SI 2 "s_register_operand"  "r,r")))]
5030  "TARGET_IDIV"
5031  "@
5032   sdiv%?\t%0, %1, %2
5033   sdiv\t%0, %1, %2"
5034  [(set_attr "arch" "32,v8mb")
5035   (set_attr "predicable" "yes")
5036   (set_attr "type" "sdiv")]
5037)
5038
5039(define_insn "udivsi3"
5040  [(set (match_operand:SI	   0 "s_register_operand" "=r,r")
5041	(udiv:SI (match_operand:SI 1 "s_register_operand"  "r,r")
5042		 (match_operand:SI 2 "s_register_operand"  "r,r")))]
5043  "TARGET_IDIV"
5044  "@
5045   udiv%?\t%0, %1, %2
5046   udiv\t%0, %1, %2"
5047  [(set_attr "arch" "32,v8mb")
5048   (set_attr "predicable" "yes")
5049   (set_attr "type" "udiv")]
5050)
5051
5052
5053;; Unary arithmetic insns
5054
5055(define_expand "negv<SIDI:mode>3"
5056  [(match_operand:SIDI 0 "s_register_operand")
5057   (match_operand:SIDI 1 "s_register_operand")
5058   (match_operand 2 "")]
5059  "TARGET_32BIT"
5060{
5061  emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
5062			      operands[2]));
5063  DONE;
5064})
5065
5066(define_expand "negsi2"
5067  [(set (match_operand:SI         0 "s_register_operand")
5068	(neg:SI (match_operand:SI 1 "s_register_operand")))]
5069  "TARGET_EITHER"
5070  ""
5071)
5072
5073(define_insn "*arm_negsi2"
5074  [(set (match_operand:SI         0 "s_register_operand" "=l,r")
5075	(neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5076  "TARGET_32BIT"
5077  "rsb%?\\t%0, %1, #0"
5078  [(set_attr "predicable" "yes")
5079   (set_attr "predicable_short_it" "yes,no")
5080   (set_attr "arch" "t2,*")
5081   (set_attr "length" "4")
5082   (set_attr "type" "alu_imm")]
5083)
5084
;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
;; rather than (0 cmp reg).  This gives the same results for unsigned
;; and equality compares, which is what we mostly need here.
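;; As a worked illustration: ~x == ~0 exactly when x == 0, and the same
;; equivalence holds for any unsigned relation, e.g. with x = 5 we have
;; ~x = 0xfffffffa <u ~0 = 0xffffffff, just as 0 <u 5.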
5088(define_insn "negsi2_0compare"
5089  [(set (reg:CC_RSB CC_REGNUM)
5090	(compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5091			(const_int -1)))
5092   (set (match_operand:SI 0 "s_register_operand" "=l,r")
5093	(neg:SI (match_dup 1)))]
5094  "TARGET_32BIT"
5095  "@
5096   negs\\t%0, %1
5097   rsbs\\t%0, %1, #0"
5098  [(set_attr "conds" "set")
5099   (set_attr "arch" "t2,*")
5100   (set_attr "length" "2,*")
5101   (set_attr "type" "alus_imm")]
5102)
5103
5104(define_insn "negsi2_carryin"
5105  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5106	(minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5107		  (match_operand:SI 2 "arm_borrow_operation" "")))]
5108  "TARGET_32BIT"
5109  "@
5110   rsc\\t%0, %1, #0
5111   sbc\\t%0, %1, %1, lsl #1"
5112  [(set_attr "conds" "use")
5113   (set_attr "arch" "a,t2")
5114   (set_attr "type" "adc_imm,adc_reg")]
5115)
5116
5117(define_expand "negsf2"
5118  [(set (match_operand:SF         0 "s_register_operand")
5119	(neg:SF (match_operand:SF 1 "s_register_operand")))]
5120  "TARGET_32BIT && TARGET_HARD_FLOAT"
5121  ""
5122)
5123
5124(define_expand "negdf2"
5125  [(set (match_operand:DF         0 "s_register_operand")
5126	(neg:DF (match_operand:DF 1 "s_register_operand")))]
5127  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5128  "")
5129
;; abssi2 doesn't really clobber the condition codes if a different register
;; is being set.  To keep things simple, assume during rtl manipulations that
;; it does, but tell the final scan operator the truth.  Similarly for
;; (neg (abs...)).
5134
5135(define_expand "abssi2"
5136  [(parallel
5137    [(set (match_operand:SI         0 "s_register_operand")
5138	  (abs:SI (match_operand:SI 1 "s_register_operand")))
5139     (clobber (match_dup 2))])]
5140  "TARGET_EITHER"
5141  "
5142  if (TARGET_THUMB1)
5143    operands[2] = gen_rtx_SCRATCH (SImode);
5144  else
5145    operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
5146")
5147
5148(define_insn_and_split "*arm_abssi2"
5149  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5150	(abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5151   (clobber (reg:CC CC_REGNUM))]
5152  "TARGET_ARM"
5153  "#"
5154  "&& reload_completed"
5155  [(const_int 0)]
5156  {
5157   /* if (which_alternative == 0) */
   if (REGNO (operands[0]) == REGNO (operands[1]))
5159     {
5160      /* Emit the pattern:
5161         cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5162         [(set (reg:CC CC_REGNUM)
5163               (compare:CC (match_dup 0) (const_int 0)))
5164          (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5165                     (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5166      */
5167      emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5168                              gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5169      emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5170                                    (gen_rtx_LT (SImode,
5171                                                 gen_rtx_REG (CCmode, CC_REGNUM),
5172                                                 const0_rtx)),
5173                                    (gen_rtx_SET (operands[0],
5174                                                  (gen_rtx_MINUS (SImode,
5175                                                                  const0_rtx,
5176                                                                  operands[1]))))));
5177      DONE;
5178     }
5179   else
5180     {
5181      /* Emit the pattern:
5182         alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5183         [(set (match_dup 0)
5184               (xor:SI (match_dup 1)
5185                       (ashiftrt:SI (match_dup 1) (const_int 31))))
5186          (set (match_dup 0)
5187               (minus:SI (match_dup 0)
5188                      (ashiftrt:SI (match_dup 1) (const_int 31))))]
5189      */
5190      emit_insn (gen_rtx_SET (operands[0],
5191                              gen_rtx_XOR (SImode,
5192                                           gen_rtx_ASHIFTRT (SImode,
5193                                                             operands[1],
5194                                                             GEN_INT (31)),
5195                                           operands[1])));
5196      emit_insn (gen_rtx_SET (operands[0],
5197                              gen_rtx_MINUS (SImode,
5198                                             operands[0],
5199                                             gen_rtx_ASHIFTRT (SImode,
5200                                                               operands[1],
5201                                                               GEN_INT (31)))));
5202      DONE;
5203     }
5204  }
5205  [(set_attr "conds" "clob,*")
5206   (set_attr "shift" "1")
5207   (set_attr "predicable" "no, yes")
5208   (set_attr "length" "8")
5209   (set_attr "type" "multiple")]
5210)
5211
5212(define_insn_and_split "*arm_neg_abssi2"
5213  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5214	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5215   (clobber (reg:CC CC_REGNUM))]
5216  "TARGET_ARM"
5217  "#"
5218  "&& reload_completed"
5219  [(const_int 0)]
5220  {
5221   /* if (which_alternative == 0) */
5222   if (REGNO (operands[0]) == REGNO (operands[1]))
5223     {
5224      /* Emit the pattern:
5225         cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5226      */
5227      emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5228                              gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5229      emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5230                                    gen_rtx_GT (SImode,
5231                                                gen_rtx_REG (CCmode, CC_REGNUM),
5232                                                const0_rtx),
5233                                    gen_rtx_SET (operands[0],
5234                                                 (gen_rtx_MINUS (SImode,
5235                                                                 const0_rtx,
5236                                                                 operands[1])))));
5237     }
5238   else
5239     {
5240      /* Emit the pattern:
5241         eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5242      */
5243      emit_insn (gen_rtx_SET (operands[0],
5244                              gen_rtx_XOR (SImode,
5245                                           gen_rtx_ASHIFTRT (SImode,
5246                                                             operands[1],
5247                                                             GEN_INT (31)),
5248                                           operands[1])));
5249      emit_insn (gen_rtx_SET (operands[0],
5250                              gen_rtx_MINUS (SImode,
5251                                             gen_rtx_ASHIFTRT (SImode,
5252                                                               operands[1],
5253                                                               GEN_INT (31)),
5254                                             operands[0])));
5255     }
5256   DONE;
5257  }
5258  [(set_attr "conds" "clob,*")
5259   (set_attr "shift" "1")
5260   (set_attr "predicable" "no, yes")
5261   (set_attr "length" "8")
5262   (set_attr "type" "multiple")]
5263)
5264
5265(define_expand "abssf2"
5266  [(set (match_operand:SF         0 "s_register_operand")
5267	(abs:SF (match_operand:SF 1 "s_register_operand")))]
5268  "TARGET_32BIT && TARGET_HARD_FLOAT"
5269  "")
5270
5271(define_expand "absdf2"
5272  [(set (match_operand:DF         0 "s_register_operand")
5273	(abs:DF (match_operand:DF 1 "s_register_operand")))]
5274  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5275  "")
5276
5277(define_expand "sqrtsf2"
5278  [(set (match_operand:SF 0 "s_register_operand")
5279	(sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5280  "TARGET_32BIT && TARGET_HARD_FLOAT"
5281  "")
5282
5283(define_expand "sqrtdf2"
5284  [(set (match_operand:DF 0 "s_register_operand")
5285	(sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5286  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5287  "")
5288
5289(define_expand "one_cmplsi2"
5290  [(set (match_operand:SI         0 "s_register_operand")
5291	(not:SI (match_operand:SI 1 "s_register_operand")))]
5292  "TARGET_EITHER"
5293  ""
5294)
5295
5296(define_insn "*arm_one_cmplsi2"
5297  [(set (match_operand:SI         0 "s_register_operand" "=l,r")
5298	(not:SI (match_operand:SI 1 "s_register_operand"  "l,r")))]
5299  "TARGET_32BIT"
5300  "mvn%?\\t%0, %1"
5301  [(set_attr "predicable" "yes")
5302   (set_attr "predicable_short_it" "yes,no")
5303   (set_attr "arch" "t2,*")
5304   (set_attr "length" "4")
5305   (set_attr "type" "mvn_reg")]
5306)
5307
5308(define_insn "*notsi_compare0"
5309  [(set (reg:CC_NZ CC_REGNUM)
5310	(compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5311			 (const_int 0)))
5312   (set (match_operand:SI 0 "s_register_operand" "=r")
5313	(not:SI (match_dup 1)))]
5314  "TARGET_32BIT"
5315  "mvns%?\\t%0, %1"
5316  [(set_attr "conds" "set")
5317   (set_attr "type" "mvn_reg")]
5318)
5319
5320(define_insn "*notsi_compare0_scratch"
5321  [(set (reg:CC_NZ CC_REGNUM)
5322	(compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5323			 (const_int 0)))
5324   (clobber (match_scratch:SI 0 "=r"))]
5325  "TARGET_32BIT"
5326  "mvns%?\\t%0, %1"
5327  [(set_attr "conds" "set")
5328   (set_attr "type" "mvn_reg")]
5329)
5330
5331;; Fixed <--> Floating conversion insns
5332
5333(define_expand "floatsihf2"
5334  [(set (match_operand:HF           0 "general_operand")
5335	(float:HF (match_operand:SI 1 "general_operand")))]
5336  "TARGET_EITHER"
5337  "
5338  {
5339    rtx op1 = gen_reg_rtx (SFmode);
5340    expand_float (op1, operands[1], 0);
5341    op1 = convert_to_mode (HFmode, op1, 0);
5342    emit_move_insn (operands[0], op1);
5343    DONE;
5344  }"
5345)
5346
5347(define_expand "floatdihf2"
5348  [(set (match_operand:HF           0 "general_operand")
5349	(float:HF (match_operand:DI 1 "general_operand")))]
5350  "TARGET_EITHER"
5351  "
5352  {
5353    rtx op1 = gen_reg_rtx (SFmode);
5354    expand_float (op1, operands[1], 0);
5355    op1 = convert_to_mode (HFmode, op1, 0);
5356    emit_move_insn (operands[0], op1);
5357    DONE;
5358  }"
5359)
5360
5361(define_expand "floatsisf2"
5362  [(set (match_operand:SF           0 "s_register_operand")
5363	(float:SF (match_operand:SI 1 "s_register_operand")))]
5364  "TARGET_32BIT && TARGET_HARD_FLOAT"
5365  "
5366")
5367
5368(define_expand "floatsidf2"
5369  [(set (match_operand:DF           0 "s_register_operand")
5370	(float:DF (match_operand:SI 1 "s_register_operand")))]
5371  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5372  "
5373")
5374
5375(define_expand "fix_trunchfsi2"
5376  [(set (match_operand:SI         0 "general_operand")
5377	(fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5378  "TARGET_EITHER"
5379  "
5380  {
5381    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5382    expand_fix (operands[0], op1, 0);
5383    DONE;
5384  }"
5385)
5386
5387(define_expand "fix_trunchfdi2"
5388  [(set (match_operand:DI         0 "general_operand")
5389	(fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5390  "TARGET_EITHER"
5391  "
5392  {
5393    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5394    expand_fix (operands[0], op1, 0);
5395    DONE;
5396  }"
5397)
5398
5399(define_expand "fix_truncsfsi2"
5400  [(set (match_operand:SI         0 "s_register_operand")
5401	(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5402  "TARGET_32BIT && TARGET_HARD_FLOAT"
5403  "
5404")
5405
5406(define_expand "fix_truncdfsi2"
5407  [(set (match_operand:SI         0 "s_register_operand")
5408	(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5409  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5410  "
5411")
5412
5413;; Truncation insns
5414
5415(define_expand "truncdfsf2"
5416  [(set (match_operand:SF  0 "s_register_operand")
5417	(float_truncate:SF
5418	 (match_operand:DF 1 "s_register_operand")))]
5419  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5420  ""
5421)
5422
5423;; DFmode to HFmode conversions on targets without a single-step hardware
5424;; instruction for it would have to go through SFmode.  This is dangerous
5425;; as it introduces double rounding.
5426;;
5427;; Disable this pattern unless we are in an unsafe math mode, or we have
5428;; a single-step instruction.
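;;
;; A sketch of the hazard: HFmode values are much more widely spaced than
;; SFmode values, and every midpoint between two adjacent HFmode values is
;; exactly representable in SFmode.  A DFmode input lying just below such
;; a midpoint can therefore round up to the exact midpoint when first
;; narrowed to SFmode, after which the SF->HF step may round to the upper
;; neighbour, whereas a single DF->HF rounding would have chosen the
;; lower one.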
5429
5430(define_expand "truncdfhf2"
5431  [(set (match_operand:HF  0 "s_register_operand")
5432	(float_truncate:HF
5433	 (match_operand:DF 1 "s_register_operand")))]
5434  "(TARGET_EITHER && flag_unsafe_math_optimizations)
5435   || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5436{
  /* We don't have a direct instruction for this, so we must be in
     an unsafe math mode; go via SFmode.  */
5439
5440  if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5441    {
5442      rtx op1;
5443      op1 = convert_to_mode (SFmode, operands[1], 0);
5444      op1 = convert_to_mode (HFmode, op1, 0);
5445      emit_move_insn (operands[0], op1);
5446      DONE;
5447    }
5448  /* Otherwise, we will pick this up as a single instruction with
5449     no intermediary rounding.  */
5450}
5451)
5452
5453;; Zero and sign extension instructions.
5454
5455(define_expand "zero_extend<mode>di2"
5456  [(set (match_operand:DI 0 "s_register_operand" "")
5457	(zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5458  "TARGET_32BIT <qhs_zextenddi_cond>"
5459  {
5460    rtx res_lo, res_hi, op0_lo, op0_hi;
5461    res_lo = gen_lowpart (SImode, operands[0]);
5462    res_hi = gen_highpart (SImode, operands[0]);
5463    if (can_create_pseudo_p ())
5464      {
5465	op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5466	op0_hi = gen_reg_rtx (SImode);
5467      }
5468    else
5469      {
5470	op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5471	op0_hi = res_hi;
5472      }
5473    if (<MODE>mode != SImode)
5474      emit_insn (gen_rtx_SET (op0_lo,
5475			      gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5476    emit_insn (gen_movsi (op0_hi, const0_rtx));
5477    if (res_lo != op0_lo)
5478      emit_move_insn (res_lo, op0_lo);
5479    if (res_hi != op0_hi)
5480      emit_move_insn (res_hi, op0_hi);
5481    DONE;
5482  }
5483)
5484
5485(define_expand "extend<mode>di2"
5486  [(set (match_operand:DI 0 "s_register_operand" "")
5487	(sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5488  "TARGET_32BIT <qhs_sextenddi_cond>"
5489  {
5490    rtx res_lo, res_hi, op0_lo, op0_hi;
5491    res_lo = gen_lowpart (SImode, operands[0]);
5492    res_hi = gen_highpart (SImode, operands[0]);
5493    if (can_create_pseudo_p ())
5494      {
5495	op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5496	op0_hi = gen_reg_rtx (SImode);
5497      }
5498    else
5499      {
5500	op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5501	op0_hi = res_hi;
5502      }
5503    if (<MODE>mode != SImode)
5504      emit_insn (gen_rtx_SET (op0_lo,
5505			      gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5506    emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5507    if (res_lo != op0_lo)
5508      emit_move_insn (res_lo, op0_lo);
5509    if (res_hi != op0_hi)
5510      emit_move_insn (res_hi, op0_hi);
5511    DONE;
5512  }
5513)
5514
5515;; Splits for all extensions to DImode
5516(define_split
5517  [(set (match_operand:DI 0 "s_register_operand" "")
5518        (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5519  "TARGET_32BIT"
5520  [(set (match_dup 0) (match_dup 1))]
5521{
5522  rtx lo_part = gen_lowpart (SImode, operands[0]);
5523  machine_mode src_mode = GET_MODE (operands[1]);
5524
5525  if (src_mode == SImode)
5526    emit_move_insn (lo_part, operands[1]);
5527  else
5528    emit_insn (gen_rtx_SET (lo_part,
5529			    gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5530  operands[0] = gen_highpart (SImode, operands[0]);
5531  operands[1] = const0_rtx;
5532})
5533
5534(define_split
5535  [(set (match_operand:DI 0 "s_register_operand" "")
5536        (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5537  "TARGET_32BIT"
5538  [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5539{
5540  rtx lo_part = gen_lowpart (SImode, operands[0]);
5541  machine_mode src_mode = GET_MODE (operands[1]);
5542
5543  if (src_mode == SImode)
5544    emit_move_insn (lo_part, operands[1]);
5545  else
5546    emit_insn (gen_rtx_SET (lo_part,
5547			    gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5548  operands[1] = lo_part;
5549  operands[0] = gen_highpart (SImode, operands[0]);
5550})
5551
5552(define_expand "zero_extendhisi2"
5553  [(set (match_operand:SI 0 "s_register_operand")
5554	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5555  "TARGET_EITHER"
5556{
5557  if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5558    {
5559      emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5560      DONE;
5561    }
5562  if (!arm_arch6 && !MEM_P (operands[1]))
5563    {
5564      rtx t = gen_lowpart (SImode, operands[1]);
5565      rtx tmp = gen_reg_rtx (SImode);
5566      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5567      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5568      DONE;
5569    }
5570})
5571
5572(define_split
5573  [(set (match_operand:SI 0 "s_register_operand" "")
5574	(zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5575  "!TARGET_THUMB2 && !arm_arch6"
5576  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5577   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5578{
5579  operands[2] = gen_lowpart (SImode, operands[1]);
5580})
5581
5582(define_insn "*arm_zero_extendhisi2"
5583  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5584	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5585  "TARGET_ARM && arm_arch4 && !arm_arch6"
5586  "@
5587   #
5588   ldrh%?\\t%0, %1"
5589  [(set_attr "type" "alu_shift_reg,load_byte")
5590   (set_attr "predicable" "yes")]
5591)
5592
5593(define_insn "*arm_zero_extendhisi2_v6"
5594  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5595	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5596  "TARGET_ARM && arm_arch6"
5597  "@
5598   uxth%?\\t%0, %1
5599   ldrh%?\\t%0, %1"
5600  [(set_attr "predicable" "yes")
5601   (set_attr "type" "extend,load_byte")]
5602)
5603
5604(define_insn "*arm_zero_extendhisi2addsi"
5605  [(set (match_operand:SI 0 "s_register_operand" "=r")
5606	(plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5607		 (match_operand:SI 2 "s_register_operand" "r")))]
5608  "TARGET_INT_SIMD"
5609  "uxtah%?\\t%0, %2, %1"
5610  [(set_attr "type" "alu_shift_reg")
5611   (set_attr "predicable" "yes")]
5612)
5613
5614(define_expand "zero_extendqisi2"
5615  [(set (match_operand:SI 0 "s_register_operand")
5616	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5617  "TARGET_EITHER"
5618{
5619  if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5620    {
      emit_insn (gen_andsi3 (operands[0],
			     gen_lowpart (SImode, operands[1]),
			     GEN_INT (255)));
5624      DONE;
5625    }
5626  if (!arm_arch6 && !MEM_P (operands[1]))
5627    {
5628      rtx t = gen_lowpart (SImode, operands[1]);
5629      rtx tmp = gen_reg_rtx (SImode);
5630      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5631      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5632      DONE;
5633    }
5634})
5635
5636(define_split
5637  [(set (match_operand:SI 0 "s_register_operand" "")
5638	(zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5639  "!arm_arch6"
5640  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5641   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5642{
5643  operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5644  if (TARGET_ARM)
5645    {
5646      emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5647      DONE;
5648    }
5649})
5650
5651(define_insn "*arm_zero_extendqisi2"
5652  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5653	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5654  "TARGET_ARM && !arm_arch6"
5655  "@
5656   #
5657   ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5658  [(set_attr "length" "8,4")
5659   (set_attr "type" "alu_shift_reg,load_byte")
5660   (set_attr "predicable" "yes")]
5661)
5662
5663(define_insn "*arm_zero_extendqisi2_v6"
5664  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5665	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5666  "TARGET_ARM && arm_arch6"
5667  "@
5668   uxtb%?\\t%0, %1
5669   ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5670  [(set_attr "type" "extend,load_byte")
5671   (set_attr "predicable" "yes")]
5672)
5673
5674(define_insn "*arm_zero_extendqisi2addsi"
5675  [(set (match_operand:SI 0 "s_register_operand" "=r")
5676	(plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5677		 (match_operand:SI 2 "s_register_operand" "r")))]
5678  "TARGET_INT_SIMD"
5679  "uxtab%?\\t%0, %2, %1"
5680  [(set_attr "predicable" "yes")
5681   (set_attr "type" "alu_shift_reg")]
5682)
5683
5684(define_split
5685  [(set (match_operand:SI 0 "s_register_operand" "")
5686	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5687   (clobber (match_operand:SI 2 "s_register_operand" ""))]
5688  "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5689  [(set (match_dup 2) (match_dup 1))
5690   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5691  ""
5692)
5693
5694(define_split
5695  [(set (match_operand:SI 0 "s_register_operand" "")
5696	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5697   (clobber (match_operand:SI 2 "s_register_operand" ""))]
5698  "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5699  [(set (match_dup 2) (match_dup 1))
5700   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5701  ""
5702)
5703
5704
5705(define_split
5706  [(set (match_operand:SI 0 "s_register_operand" "")
5707	(IOR_XOR:SI (and:SI (ashift:SI
5708			     (match_operand:SI 1 "s_register_operand" "")
5709			     (match_operand:SI 2 "const_int_operand" ""))
5710			    (match_operand:SI 3 "const_int_operand" ""))
5711		    (zero_extend:SI
5712		     (match_operator 5 "subreg_lowpart_operator"
5713		      [(match_operand:SI 4 "s_register_operand" "")]))))]
5714  "TARGET_32BIT
5715   && (UINTVAL (operands[3])
5716       == (GET_MODE_MASK (GET_MODE (operands[5]))
5717           & (GET_MODE_MASK (GET_MODE (operands[5]))
5718	      << (INTVAL (operands[2])))))"
5719  [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5720				  (match_dup 4)))
5721   (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5722  "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5723)
5724
5725(define_insn "*compareqi_eq0"
5726  [(set (reg:CC_Z CC_REGNUM)
5727	(compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5728			 (const_int 0)))]
5729  "TARGET_32BIT"
5730  "tst%?\\t%0, #255"
5731  [(set_attr "conds" "set")
5732   (set_attr "predicable" "yes")
5733   (set_attr "type" "logic_imm")]
5734)
5735
5736(define_expand "extendhisi2"
5737  [(set (match_operand:SI 0 "s_register_operand")
5738	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5739  "TARGET_EITHER"
5740{
5741  if (TARGET_THUMB1)
5742    {
5743      emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5744      DONE;
5745    }
5746  if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5747    {
5748      emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5749      DONE;
5750    }
5751
5752  if (!arm_arch6 && !MEM_P (operands[1]))
5753    {
5754      rtx t = gen_lowpart (SImode, operands[1]);
5755      rtx tmp = gen_reg_rtx (SImode);
5756      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5757      emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5758      DONE;
5759    }
5760})
5761
5762(define_split
5763  [(parallel
5764    [(set (match_operand:SI 0 "register_operand" "")
5765	  (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5766     (clobber (match_scratch:SI 2 ""))])]
5767  "!arm_arch6"
5768  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5769   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5770{
5771  operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5772})
5773
;; This pattern will only be used when ldrsh is not available.
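;;
;; A rough C equivalent of the expansion (illustrative only; this is the
;; little-endian case, with the byte roles swapped for big-endian):
;;
;;   int extendhisi2_mem_equiv (const unsigned char *p)
;;   {
;;     unsigned int lo = p[0];   /* byte at the lower address.  */
;;     unsigned int hi = p[1];   /* byte that carries the sign bit.  */
;;     return (((int) (hi << 24)) >> 16) | lo;
;;   }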
5775(define_expand "extendhisi2_mem"
5776  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5777   (set (match_dup 3)
5778	(zero_extend:SI (match_dup 7)))
5779   (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5780   (set (match_operand:SI 0 "" "")
5781	(ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5782  "TARGET_ARM"
5783  "
5784  {
5785    rtx mem1, mem2;
5786    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5787
5788    mem1 = change_address (operands[1], QImode, addr);
5789    mem2 = change_address (operands[1], QImode,
5790			   plus_constant (Pmode, addr, 1));
5791    operands[0] = gen_lowpart (SImode, operands[0]);
5792    operands[1] = mem1;
5793    operands[2] = gen_reg_rtx (SImode);
5794    operands[3] = gen_reg_rtx (SImode);
5795    operands[6] = gen_reg_rtx (SImode);
5796    operands[7] = mem2;
5797
5798    if (BYTES_BIG_ENDIAN)
5799      {
5800	operands[4] = operands[2];
5801	operands[5] = operands[3];
5802      }
5803    else
5804      {
5805	operands[4] = operands[3];
5806	operands[5] = operands[2];
5807      }
5808  }"
5809)
5810
5811(define_split
5812  [(set (match_operand:SI 0 "register_operand" "")
5813	(sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5814  "!arm_arch6"
5815  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5816   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5817{
5818  operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5819})
5820
5821(define_insn "*arm_extendhisi2"
5822  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5823	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5824  "TARGET_ARM && arm_arch4 && !arm_arch6"
5825  "@
5826   #
5827   ldrsh%?\\t%0, %1"
5828  [(set_attr "length" "8,4")
5829   (set_attr "type" "alu_shift_reg,load_byte")
5830   (set_attr "predicable" "yes")]
5831)
5832
5833;; ??? Check Thumb-2 pool range
5834(define_insn "*arm_extendhisi2_v6"
5835  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5836	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5837  "TARGET_32BIT && arm_arch6"
5838  "@
5839   sxth%?\\t%0, %1
5840   ldrsh%?\\t%0, %1"
5841  [(set_attr "type" "extend,load_byte")
5842   (set_attr "predicable" "yes")]
5843)
5844
5845(define_insn "*arm_extendhisi2addsi"
5846  [(set (match_operand:SI 0 "s_register_operand" "=r")
5847	(plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5848		 (match_operand:SI 2 "s_register_operand" "r")))]
5849  "TARGET_INT_SIMD"
5850  "sxtah%?\\t%0, %2, %1"
5851  [(set_attr "type" "alu_shift_reg")]
5852)
5853
5854(define_expand "extendqihi2"
5855  [(set (match_dup 2)
5856	(ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5857		   (const_int 24)))
5858   (set (match_operand:HI 0 "s_register_operand")
5859	(ashiftrt:SI (match_dup 2)
5860		     (const_int 24)))]
5861  "TARGET_ARM"
5862  "
5863  {
5864    if (arm_arch4 && MEM_P (operands[1]))
5865      {
5866	emit_insn (gen_rtx_SET (operands[0],
5867				gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5868	DONE;
5869      }
5870    if (!s_register_operand (operands[1], QImode))
5871      operands[1] = copy_to_mode_reg (QImode, operands[1]);
5872    operands[0] = gen_lowpart (SImode, operands[0]);
5873    operands[1] = gen_lowpart (SImode, operands[1]);
5874    operands[2] = gen_reg_rtx (SImode);
5875  }"
5876)
5877
5878(define_insn "*arm_extendqihi_insn"
5879  [(set (match_operand:HI 0 "s_register_operand" "=r")
5880	(sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5881  "TARGET_ARM && arm_arch4"
5882  "ldrsb%?\\t%0, %1"
5883  [(set_attr "type" "load_byte")
5884   (set_attr "predicable" "yes")]
5885)
5886
5887(define_expand "extendqisi2"
5888  [(set (match_operand:SI 0 "s_register_operand")
5889	(sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5890  "TARGET_EITHER"
5891{
5892  if (!arm_arch4 && MEM_P (operands[1]))
5893    operands[1] = copy_to_mode_reg (QImode, operands[1]);
5894
5895  if (!arm_arch6 && !MEM_P (operands[1]))
5896    {
5897      rtx t = gen_lowpart (SImode, operands[1]);
5898      rtx tmp = gen_reg_rtx (SImode);
5899      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5900      emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5901      DONE;
5902    }
5903})
5904
5905(define_split
5906  [(set (match_operand:SI 0 "register_operand" "")
5907	(sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5908  "!arm_arch6"
5909  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5910   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5911{
5912  operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5913})
5914
5915(define_insn "*arm_extendqisi"
5916  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5917	(sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5918  "TARGET_ARM && arm_arch4 && !arm_arch6"
5919  "@
5920   #
5921   ldrsb%?\\t%0, %1"
5922  [(set_attr "length" "8,4")
5923   (set_attr "type" "alu_shift_reg,load_byte")
5924   (set_attr "predicable" "yes")]
5925)
5926
5927(define_insn "*arm_extendqisi_v6"
5928  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5929	(sign_extend:SI
5930	 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5931  "TARGET_ARM && arm_arch6"
5932  "@
5933   sxtb%?\\t%0, %1
5934   ldrsb%?\\t%0, %1"
5935  [(set_attr "type" "extend,load_byte")
5936   (set_attr "predicable" "yes")]
5937)
5938
5939(define_insn "*arm_extendqisi2addsi"
5940  [(set (match_operand:SI 0 "s_register_operand" "=r")
5941	(plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5942		 (match_operand:SI 2 "s_register_operand" "r")))]
5943  "TARGET_INT_SIMD"
5944  "sxtab%?\\t%0, %2, %1"
5945  [(set_attr "type" "alu_shift_reg")
5946   (set_attr "predicable" "yes")]
5947)
5948
5949(define_insn "arm_<sup>xtb16"
5950  [(set (match_operand:SI 0 "s_register_operand" "=r")
5951	(unspec:SI
5952	  [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5953  "TARGET_INT_SIMD"
5954  "<sup>xtb16%?\\t%0, %1"
5955  [(set_attr "predicable" "yes")
5956   (set_attr "type" "alu_dsp_reg")])
5957
5958(define_insn "arm_<simd32_op>"
5959  [(set (match_operand:SI 0 "s_register_operand" "=r")
5960	(unspec:SI
5961	  [(match_operand:SI 1 "s_register_operand" "r")
5962	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5963  "TARGET_INT_SIMD"
5964  "<simd32_op>%?\\t%0, %1, %2"
5965  [(set_attr "predicable" "yes")
5966   (set_attr "type" "alu_dsp_reg")])
5967
5968(define_insn "arm_usada8"
5969  [(set (match_operand:SI 0 "s_register_operand" "=r")
5970	(unspec:SI
5971	  [(match_operand:SI 1 "s_register_operand" "r")
5972	  (match_operand:SI 2 "s_register_operand" "r")
5973	  (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5974  "TARGET_INT_SIMD"
5975  "usada8%?\\t%0, %1, %2, %3"
5976  [(set_attr "predicable" "yes")
5977   (set_attr "type" "alu_dsp_reg")])
5978
5979(define_insn "arm_<simd32_op>"
5980  [(set (match_operand:DI 0 "s_register_operand" "=r")
5981	(unspec:DI
5982	  [(match_operand:SI 1 "s_register_operand" "r")
5983	   (match_operand:SI 2 "s_register_operand" "r")
5984	   (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5985  "TARGET_INT_SIMD"
5986  "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5987  [(set_attr "predicable" "yes")
5988   (set_attr "type" "smlald")])
5989
5990(define_insn "arm_<simd32_op>"
5991  [(set (match_operand:SI 0 "s_register_operand" "=r")
5992	(unspec:SI
5993	  [(match_operand:SI 1 "s_register_operand" "r")
5994	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
5995   (set (reg:CC APSRGE_REGNUM)
5996	(unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
5997  "TARGET_INT_SIMD"
5998  "<simd32_op>%?\\t%0, %1, %2"
5999  [(set_attr "predicable" "yes")
6000   (set_attr "type" "alu_sreg")])
6001
6002(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6003  [(set (match_operand:SI 0 "s_register_operand" "=r")
6004	(unspec:SI
6005	  [(match_operand:SI 1 "s_register_operand" "r")
6006	   (match_operand:SI 2 "s_register_operand" "r")
6007	   (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6008  "TARGET_INT_SIMD && <add_clobber_q_pred>"
6009  "<simd32_op>%?\\t%0, %1, %2, %3"
6010  [(set_attr "predicable" "yes")
6011   (set_attr "type" "alu_sreg")])
6012
6013(define_expand "arm_<simd32_op>"
6014  [(set (match_operand:SI 0 "s_register_operand")
6015	(unspec:SI
6016	  [(match_operand:SI 1 "s_register_operand")
6017	   (match_operand:SI 2 "s_register_operand")
6018	   (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6019  "TARGET_INT_SIMD"
6020  {
6021    if (ARM_Q_BIT_READ)
6022      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6023						operands[2], operands[3]));
6024    else
6025      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6026					   operands[2], operands[3]));
6027    DONE;
6028  }
6029)
6030
6031(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6032  [(set (match_operand:SI 0 "s_register_operand" "=r")
6033	(unspec:SI
6034	  [(match_operand:SI 1 "s_register_operand" "r")
6035	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6036  "TARGET_INT_SIMD && <add_clobber_q_pred>"
6037  "<simd32_op>%?\\t%0, %1, %2"
6038  [(set_attr "predicable" "yes")
6039   (set_attr "type" "alu_sreg")])
6040
6041(define_expand "arm_<simd32_op>"
6042  [(set (match_operand:SI 0 "s_register_operand")
6043	(unspec:SI
6044	  [(match_operand:SI 1 "s_register_operand")
6045	   (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6046  "TARGET_INT_SIMD"
6047  {
6048    if (ARM_Q_BIT_READ)
6049      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6050						operands[2]));
6051    else
6052      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6053					   operands[2]));
6054    DONE;
6055  }
6056)
6057
6058(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6059  [(set (match_operand:SI 0 "s_register_operand" "=r")
6060	(unspec:SI
6061	  [(match_operand:SI 1 "s_register_operand" "r")
6062	   (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6063  "TARGET_INT_SIMD && <add_clobber_q_pred>"
6064  "<simd32_op>%?\\t%0, %2, %1"
6065  [(set_attr "predicable" "yes")
6066   (set_attr "type" "alu_sreg")])
6067
6068(define_expand "arm_<simd32_op>"
6069  [(set (match_operand:SI 0 "s_register_operand")
6070	(unspec:SI
6071	  [(match_operand:SI 1 "s_register_operand")
6072	   (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6073  "TARGET_INT_SIMD"
6074  {
6075    if (ARM_Q_BIT_READ)
6076      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6077						operands[2]));
6078    else
6079      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6080					   operands[2]));
6081    DONE;
6082  }
6083)
6084
6085(define_insn "arm_sel"
6086  [(set (match_operand:SI 0 "s_register_operand" "=r")
6087	(unspec:SI
6088	  [(match_operand:SI 1 "s_register_operand" "r")
6089	   (match_operand:SI 2 "s_register_operand" "r")
6090	   (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6091  "TARGET_INT_SIMD"
6092  "sel%?\\t%0, %1, %2"
6093  [(set_attr "predicable" "yes")
6094   (set_attr "type" "alu_sreg")])
6095
6096(define_expand "extendsfdf2"
6097  [(set (match_operand:DF                  0 "s_register_operand")
6098	(float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6099  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6100  ""
6101)
6102
6103;; HFmode -> DFmode conversions where we don't have an instruction for it
6104;; must go through SFmode.
6105;;
6106;; This is always safe for an extend.
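;; (Informally: every HFmode value, including the subnormals, is exactly
;; representable in SFmode, and every SFmode value is exactly representable
;; in DFmode, so neither step of the HF->SF->DF chain rounds.)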
6107
6108(define_expand "extendhfdf2"
6109  [(set (match_operand:DF		   0 "s_register_operand")
6110	(float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6111  "TARGET_EITHER"
6112{
6113  /* We don't have a direct instruction for this, so go via SFmode.  */
6114  if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
6115    {
6116      rtx op1;
6117      op1 = convert_to_mode (SFmode, operands[1], 0);
6118      op1 = convert_to_mode (DFmode, op1, 0);
6119      emit_insn (gen_movdf (operands[0], op1));
6120      DONE;
6121    }
6122  /* Otherwise, we're done producing RTL and will pick up the correct
6123     pattern to do this with one rounding-step in a single instruction.  */
6124}
6125)
6126
6127;; Move insns (including loads and stores)
6128
;; XXX Just some ideas about movti.
;; I don't think these are a good idea on the ARM; there just aren't enough
;; registers.
6132;;(define_expand "loadti"
6133;;  [(set (match_operand:TI 0 "s_register_operand")
6134;;	(mem:TI (match_operand:SI 1 "address_operand")))]
6135;;  "" "")
6136
6137;;(define_expand "storeti"
6138;;  [(set (mem:TI (match_operand:TI 0 "address_operand"))
6139;;	(match_operand:TI 1 "s_register_operand"))]
6140;;  "" "")
6141
6142;;(define_expand "movti"
6143;;  [(set (match_operand:TI 0 "general_operand")
6144;;	(match_operand:TI 1 "general_operand"))]
6145;;  ""
6146;;  "
6147;;{
6148;;  rtx insn;
6149;;
6150;;  if (MEM_P (operands[0]) && MEM_P (operands[1]))
6151;;    operands[1] = copy_to_reg (operands[1]);
6152;;  if (MEM_P (operands[0]))
6153;;    insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6154;;  else if (MEM_P (operands[1]))
6155;;    insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6156;;  else
6157;;    FAIL;
6158;;
6159;;  emit_insn (insn);
6160;;  DONE;
6161;;}")
6162
6163;; Recognize garbage generated above.
6164
6165;;(define_insn ""
6166;;  [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6167;;	(match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6168;;  ""
6169;;  "*
6170;;  {
6171;;    register mem = (which_alternative < 3);
6172;;    register const char *template;
6173;;
6174;;    operands[mem] = XEXP (operands[mem], 0);
6175;;    switch (which_alternative)
6176;;      {
6177;;      case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6178;;      case 1: template = \"ldmia\\t%1!, %M0\"; break;
6179;;      case 2: template = \"ldmia\\t%1, %M0\"; break;
6180;;      case 3: template = \"stmdb\\t%0!, %M1\"; break;
6181;;      case 4: template = \"stmia\\t%0!, %M1\"; break;
6182;;      case 5: template = \"stmia\\t%0, %M1\"; break;
6183;;      }
6184;;    output_asm_insn (template, operands);
6185;;    return \"\";
6186;;  }")
6187
6188(define_expand "movdi"
6189  [(set (match_operand:DI 0 "general_operand")
6190	(match_operand:DI 1 "general_operand"))]
6191  "TARGET_EITHER"
6192  "
6193  gcc_checking_assert (aligned_operand (operands[0], DImode));
6194  gcc_checking_assert (aligned_operand (operands[1], DImode));
6195  if (can_create_pseudo_p ())
6196    {
6197      if (!REG_P (operands[0]))
6198	operands[1] = force_reg (DImode, operands[1]);
6199    }
6200  if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6201      && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6202    {
6203      /* Avoid LDRD's into an odd-numbered register pair in ARM state
6204	 when expanding function calls.  */
6205      gcc_assert (can_create_pseudo_p ());
6206      if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6207	{
6208	  /* Perform load into legal reg pair first, then move.  */
6209	  rtx reg = gen_reg_rtx (DImode);
6210	  emit_insn (gen_movdi (reg, operands[1]));
6211	  operands[1] = reg;
6212	}
6213      emit_move_insn (gen_lowpart (SImode, operands[0]),
6214		      gen_lowpart (SImode, operands[1]));
6215      emit_move_insn (gen_highpart (SImode, operands[0]),
6216		      gen_highpart (SImode, operands[1]));
6217      DONE;
6218    }
6219  else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6220	   && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6221    {
6222      /* Avoid STRD's from an odd-numbered register pair in ARM state
6223	 when expanding function prologue.  */
6224      gcc_assert (can_create_pseudo_p ());
6225      rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6226		       ? gen_reg_rtx (DImode)
6227		       : operands[0];
6228      emit_move_insn (gen_lowpart (SImode, split_dest),
6229		      gen_lowpart (SImode, operands[1]));
6230      emit_move_insn (gen_highpart (SImode, split_dest),
6231		      gen_highpart (SImode, operands[1]));
6232      if (split_dest != operands[0])
6233	emit_insn (gen_movdi (operands[0], split_dest));
6234      DONE;
6235    }
6236  "
6237)
6238
6239(define_insn "*arm_movdi"
6240  [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6241	(match_operand:DI 1 "di_operand"              "rDa,Db,Dc,mi,r"))]
6242  "TARGET_32BIT
6243   && !(TARGET_HARD_FLOAT)
6244   && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6245   && !TARGET_IWMMXT
6246   && (   register_operand (operands[0], DImode)
6247       || register_operand (operands[1], DImode))"
6248  "*
6249  switch (which_alternative)
6250    {
6251    case 0:
6252    case 1:
6253    case 2:
6254      return \"#\";
6255    case 3:
      /* Cannot load it directly; split to load it via MOV / MOVT.  */
6257      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6258	return \"#\";
6259      /* Fall through.  */
6260    default:
6261      return output_move_double (operands, true, NULL);
6262    }
6263  "
6264  [(set_attr "length" "8,12,16,8,8")
6265   (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6266   (set_attr "arm_pool_range" "*,*,*,1020,*")
6267   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6268   (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6269   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6270)
6271
6272(define_split
6273  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6274	(match_operand:ANY64 1 "immediate_operand" ""))]
6275  "TARGET_32BIT
6276   && reload_completed
6277   && (arm_disable_literal_pool
6278       || (arm_const_double_inline_cost (operands[1])
6279	   <= arm_max_const_double_inline_cost ()))"
6280  [(const_int 0)]
6281  "
6282  arm_split_constant (SET, SImode, curr_insn,
6283		      INTVAL (gen_lowpart (SImode, operands[1])),
6284		      gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6285  arm_split_constant (SET, SImode, curr_insn,
6286		      INTVAL (gen_highpart_mode (SImode,
6287						 GET_MODE (operands[0]),
6288						 operands[1])),
6289		      gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6290  DONE;
6291  "
6292)
6293
; If optimizing for size, or if we have load delay slots, then
; we want to split the constant into two separate operations.
; In both cases this may split a trivial part into a single data op,
; leaving a single complex constant to load.  We can also get longer
; offsets in an LDR, which means we get better chances of sharing the pool
; entries.  Finally, we can normally do a better job of scheduling
; LDR instructions than we can with LDM.
; This pattern will only match if the one above did not.
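; As a hedged example of the "trivial part" case: for a hypothetical
; DImode constant such as 0x1234567800000005, the low word 0x5 is a single
; MOV-immediate data op and only the high word 0x12345678 still needs a
; load, so splitting keeps the pool entry to one word and lets the LDR be
; scheduled independently.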
6302(define_split
6303  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6304	(match_operand:ANY64 1 "const_double_operand" ""))]
6305  "TARGET_ARM && reload_completed
6306   && arm_const_double_by_parts (operands[1])"
6307  [(set (match_dup 0) (match_dup 1))
6308   (set (match_dup 2) (match_dup 3))]
6309  "
6310  operands[2] = gen_highpart (SImode, operands[0]);
6311  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6312				   operands[1]);
6313  operands[0] = gen_lowpart (SImode, operands[0]);
6314  operands[1] = gen_lowpart (SImode, operands[1]);
6315  "
6316)
6317
6318(define_split
6319  [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6320	(match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6321  "TARGET_EITHER && reload_completed"
6322  [(set (match_dup 0) (match_dup 1))
6323   (set (match_dup 2) (match_dup 3))]
6324  "
6325  operands[2] = gen_highpart (SImode, operands[0]);
6326  operands[3] = gen_highpart (SImode, operands[1]);
6327  operands[0] = gen_lowpart (SImode, operands[0]);
6328  operands[1] = gen_lowpart (SImode, operands[1]);
6329
6330  /* Handle a partial overlap.  */
6331  if (rtx_equal_p (operands[0], operands[3]))
6332    {
6333      rtx tmp0 = operands[0];
6334      rtx tmp1 = operands[1];
6335
6336      operands[0] = operands[2];
6337      operands[1] = operands[3];
6338      operands[2] = tmp0;
6339      operands[3] = tmp1;
6340    }
6341  "
6342)
6343
6344;; We can't actually do base+index doubleword loads if the index and
;; destination overlap.  Split here so that we at least have a chance to
6346;; schedule.
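;; For example (a sketch, register choice illustrative): loading r0:r1 from
;; [r0 + r1] cannot be a single base+index LDRD, so the split first forms the
;; address with 'add r0, r0, r1' and then loads with 'ldrd r0, r1, [r0]'.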
6347(define_split
6348  [(set (match_operand:DI 0 "s_register_operand" "")
6349	(mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6350			 (match_operand:SI 2 "s_register_operand" ""))))]
6351  "TARGET_LDRD
6352  && reg_overlap_mentioned_p (operands[0], operands[1])
6353  && reg_overlap_mentioned_p (operands[0], operands[2])"
6354  [(set (match_dup 4)
6355	(plus:SI (match_dup 1)
6356		 (match_dup 2)))
6357   (set (match_dup 0)
6358	(mem:DI (match_dup 4)))]
6359  "
6360  operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6361  "
6362)
6363
6364(define_expand "movsi"
6365  [(set (match_operand:SI 0 "general_operand")
6366        (match_operand:SI 1 "general_operand"))]
6367  "TARGET_EITHER"
6368  "
6369  {
6370  rtx base, offset, tmp;
6371
6372  gcc_checking_assert (aligned_operand (operands[0], SImode));
6373  gcc_checking_assert (aligned_operand (operands[1], SImode));
6374  if (TARGET_32BIT || TARGET_HAVE_MOVT)
6375    {
6376      /* Everything except mem = const or mem = mem can be done easily.  */
6377      if (MEM_P (operands[0]))
6378        operands[1] = force_reg (SImode, operands[1]);
6379      if (arm_general_register_operand (operands[0], SImode)
6380	  && CONST_INT_P (operands[1])
6381          && !(const_ok_for_arm (INTVAL (operands[1]))
6382               || const_ok_for_arm (~INTVAL (operands[1]))))
6383        {
6384	   if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6385	     {
6386		emit_insn (gen_rtx_SET (operands[0], operands[1]));
6387		DONE;
6388	     }
6389	  else
6390	     {
6391		arm_split_constant (SET, SImode, NULL_RTX,
6392	                            INTVAL (operands[1]), operands[0], NULL_RTX,
6393			            optimize && can_create_pseudo_p ());
6394		DONE;
6395	     }
6396        }
6397    }
6398  else /* Target doesn't have MOVT...  */
6399    {
6400      if (can_create_pseudo_p ())
6401        {
6402          if (!REG_P (operands[0]))
6403	    operands[1] = force_reg (SImode, operands[1]);
6404        }
6405    }
6406
6407  split_const (operands[1], &base, &offset);
6408  if (INTVAL (offset) != 0
6409      && targetm.cannot_force_const_mem (SImode, operands[1]))
6410    {
6411      tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6412      emit_move_insn (tmp, base);
6413      emit_insn (gen_addsi3 (operands[0], tmp, offset));
6414      DONE;
6415    }
6416
6417  tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6418
6419  /* Recognize the case where operand[1] is a reference to thread-local
6420     data and load its address to a register.  Offsets have been split off
6421     already.  */
6422  if (arm_tls_referenced_p (operands[1]))
6423    operands[1] = legitimize_tls_address (operands[1], tmp);
6424  else if (flag_pic
6425	   && (CONSTANT_P (operands[1])
6426	       || symbol_mentioned_p (operands[1])
6427	       || label_mentioned_p (operands[1])))
6428    operands[1] =
6429      legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6430  }
6431  "
6432)
6433
6434;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6435;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
6436;; so this does not matter.
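;; For example, the *arm_movt pattern below matches a LO_SUM of a symbolic
;; operand yet emits 'movt rd, #:upper16:sym', i.e. it fills in the upper
;; half-word.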
6437(define_insn "*arm_movt"
6438  [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6439	(lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6440		   (match_operand:SI 2 "general_operand"      "i,i")))]
6441  "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6442  "@
6443   movt%?\t%0, #:upper16:%c2
6444   movt\t%0, #:upper16:%c2"
6445  [(set_attr "arch"  "32,v8mb")
6446   (set_attr "predicable" "yes")
6447   (set_attr "length" "4")
6448   (set_attr "type" "alu_sreg")]
6449)
6450
6451(define_insn "*arm_movsi_insn"
6452  [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6453	(match_operand:SI 1 "general_operand"      "rk, I,K,j,mi,rk"))]
6454  "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6455   && (   register_operand (operands[0], SImode)
6456       || register_operand (operands[1], SImode))"
6457  "@
6458   mov%?\\t%0, %1
6459   mov%?\\t%0, %1
6460   mvn%?\\t%0, #%B1
6461   movw%?\\t%0, %1
6462   ldr%?\\t%0, %1
6463   str%?\\t%1, %0"
6464  [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6465   (set_attr "predicable" "yes")
6466   (set_attr "arch" "*,*,*,v6t2,*,*")
6467   (set_attr "pool_range" "*,*,*,*,4096,*")
6468   (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6469)
6470
6471(define_split
6472  [(set (match_operand:SI 0 "arm_general_register_operand" "")
6473	(match_operand:SI 1 "const_int_operand" ""))]
6474  "(TARGET_32BIT || TARGET_HAVE_MOVT)
6475  && (!(const_ok_for_arm (INTVAL (operands[1]))
6476        || const_ok_for_arm (~INTVAL (operands[1]))))"
6477  [(clobber (const_int 0))]
6478  "
6479  arm_split_constant (SET, SImode, NULL_RTX,
6480                      INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6481  DONE;
6482  "
6483)
6484
;; The normal way to materialize (symbol + offset) takes at least three
;; instructions (depending on how big the offset is), for example:
;; movw r0, #:lower16:g
;; movt r0, #:upper16:g
;; adds r0, #4
;;
;; A better way would be:
;; movw r0, #:lower16:g+4
;; movt r0, #:upper16:g+4
;;
;; The limitation of this approach is that the offset must be a 16-bit signed
;; value, because the current assembler only supports REL-type relocations for
;; this case.  If the more powerful RELA type is supported in future, this
;; pattern should be updated to use it.
6499(define_split
6500  [(set (match_operand:SI 0 "arm_general_register_operand" "")
6501	(const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6502			   (match_operand:SI 2 "const_int_operand" ""))))]
6503  "TARGET_THUMB
6504   && TARGET_HAVE_MOVT
6505   && arm_disable_literal_pool
6506   && reload_completed
6507   && GET_CODE (operands[1]) == SYMBOL_REF"
6508  [(clobber (const_int 0))]
6509  "
6510    int offset = INTVAL (operands[2]);
6511
6512    if (offset < -0x8000 || offset > 0x7fff)
6513      {
6514	arm_emit_movpair (operands[0], operands[1]);
6515	emit_insn (gen_rtx_SET (operands[0],
6516				gen_rtx_PLUS (SImode, operands[0], operands[2])));
6517      }
6518    else
6519      {
6520	rtx op = gen_rtx_CONST (SImode,
6521				gen_rtx_PLUS (SImode, operands[1], operands[2]));
6522	arm_emit_movpair (operands[0], op);
6523      }
6524  "
6525)
6526
;; Split symbol_refs at a later stage (after cprop), instead of generating
;; the movt/movw pair directly at expand time; otherwise the corresponding
;; HIGH and LO_SUM would be merged back into a memory load by cprop.  When
;; the default is to prefer movt/movw over a load from the constant pool,
;; this gives better performance.
6532(define_split
6533  [(set (match_operand:SI 0 "arm_general_register_operand" "")
6534       (match_operand:SI 1 "general_operand" ""))]
6535  "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6536   && !target_word_relocations
6537   && !arm_tls_referenced_p (operands[1])"
6538  [(clobber (const_int 0))]
6539{
6540  arm_emit_movpair (operands[0], operands[1]);
6541  DONE;
6542})
6543
6544;; When generating pic, we need to load the symbol offset into a register.
6545;; So that the optimizer does not confuse this with a normal symbol load
6546;; we use an unspec.  The offset will be loaded from a constant pool entry,
6547;; since that is the only type of relocation we can use.
6548
6549;; Wrap calculation of the whole PIC address in a single pattern for the
6550;; benefit of optimizers, particularly, PRE and HOIST.  Calculation of
6551;; a PIC address involves two loads from memory, so we want to CSE it
6552;; as often as possible.
6553;; This pattern will be split into one of the pic_load_addr_* patterns
6554;; and a move after GCSE optimizations.
6555;;
6556;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
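;; After the split below the generated sequence is roughly (a sketch; r3 and
;; the PIC register rPIC are illustrative names):
;;   ldr r3, .LCPn        @ minipool entry holding the UNSPEC_PIC_SYM offset
;;   ldr r0, [rPIC, r3]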
6557(define_expand "calculate_pic_address"
6558  [(set (match_operand:SI 0 "register_operand")
6559	(mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6560			 (unspec:SI [(match_operand:SI 2 "" "")]
6561				    UNSPEC_PIC_SYM))))]
6562  "flag_pic"
6563)
6564
6565;; Split calculate_pic_address into pic_load_addr_* and a move.
6566(define_split
6567  [(set (match_operand:SI 0 "register_operand" "")
6568	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6569			 (unspec:SI [(match_operand:SI 2 "" "")]
6570				    UNSPEC_PIC_SYM))))]
6571  "flag_pic"
6572  [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6573   (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6574  "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6575)
6576
6577;; operand1 is the memory address to go into
6578;; pic_load_addr_32bit.
6579;; operand2 is the PIC label to be emitted
6580;; from pic_add_dot_plus_eight.
6581;; We do this to allow hoisting of the entire insn.
6582(define_insn_and_split "pic_load_addr_unified"
6583  [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6584	(unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6585		    (match_operand:SI 2 "" "")]
6586		    UNSPEC_PIC_UNIFIED))]
6587 "flag_pic"
6588 "#"
6589 "&& reload_completed"
6590 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6591  (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6592       		     		 (match_dup 2)] UNSPEC_PIC_BASE))]
6593 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6594 [(set_attr "type" "load_4,load_4,load_4")
6595  (set_attr "pool_range" "4096,4094,1022")
6596  (set_attr "neg_pool_range" "4084,0,0")
6597  (set_attr "arch"  "a,t2,t1")
6598  (set_attr "length" "8,6,4")]
6599)
6600
6601;; The rather odd constraints on the following are to force reload to leave
6602;; the insn alone, and to force the minipool generation pass to then move
6603;; the GOT symbol to memory.
6604
6605(define_insn "pic_load_addr_32bit"
6606  [(set (match_operand:SI 0 "s_register_operand" "=r")
6607	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6608  "TARGET_32BIT && flag_pic"
6609  "ldr%?\\t%0, %1"
6610  [(set_attr "type" "load_4")
6611   (set (attr "pool_range")
6612	(if_then_else (eq_attr "is_thumb" "no")
6613		      (const_int 4096)
6614		      (const_int 4094)))
6615   (set (attr "neg_pool_range")
6616	(if_then_else (eq_attr "is_thumb" "no")
6617		      (const_int 4084)
6618		      (const_int 0)))]
6619)
6620
6621(define_insn "pic_load_addr_thumb1"
6622  [(set (match_operand:SI 0 "s_register_operand" "=l")
6623	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6624  "TARGET_THUMB1 && flag_pic"
6625  "ldr\\t%0, %1"
6626  [(set_attr "type" "load_4")
6627   (set (attr "pool_range") (const_int 1018))]
6628)
6629
6630(define_insn "pic_add_dot_plus_four"
6631  [(set (match_operand:SI 0 "register_operand" "=r")
6632	(unspec:SI [(match_operand:SI 1 "register_operand" "0")
6633		    (const_int 4)
6634		    (match_operand 2 "" "")]
6635		   UNSPEC_PIC_BASE))]
6636  "TARGET_THUMB"
6637  "*
6638  (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6639				     INTVAL (operands[2]));
6640  return \"add\\t%0, %|pc\";
6641  "
6642  [(set_attr "length" "2")
6643   (set_attr "type" "alu_sreg")]
6644)
6645
6646(define_insn "pic_add_dot_plus_eight"
6647  [(set (match_operand:SI 0 "register_operand" "=r")
6648	(unspec:SI [(match_operand:SI 1 "register_operand" "r")
6649		    (const_int 8)
6650		    (match_operand 2 "" "")]
6651		   UNSPEC_PIC_BASE))]
6652  "TARGET_ARM"
6653  "*
6654    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6655				       INTVAL (operands[2]));
6656    return \"add%?\\t%0, %|pc, %1\";
6657  "
6658  [(set_attr "predicable" "yes")
6659   (set_attr "type" "alu_sreg")]
6660)
6661
6662(define_insn "tls_load_dot_plus_eight"
6663  [(set (match_operand:SI 0 "register_operand" "=r")
6664	(mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6665			    (const_int 8)
6666			    (match_operand 2 "" "")]
6667			   UNSPEC_PIC_BASE)))]
6668  "TARGET_ARM"
6669  "*
6670    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6671				       INTVAL (operands[2]));
6672    return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6673  "
6674  [(set_attr "predicable" "yes")
6675   (set_attr "type" "load_4")]
6676)
6677
6678;; PIC references to local variables can generate pic_add_dot_plus_eight
6679;; followed by a load.  These sequences can be crunched down to
6680;; tls_load_dot_plus_eight by a peephole.
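;; For example (a sketch, register numbers illustrative):
;;   add r0, pc, r1       @ pic_add_dot_plus_eight, emits the LPIC label
;;   ldr r2, [r0]
;; becomes, when r0 is dead afterwards:
;;   ldr r2, [pc, r1]     @ tls_load_dot_plus_eight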
6681
6682(define_peephole2
6683  [(set (match_operand:SI 0 "register_operand" "")
6684	(unspec:SI [(match_operand:SI 3 "register_operand" "")
6685		    (const_int 8)
6686		    (match_operand 1 "" "")]
6687		   UNSPEC_PIC_BASE))
6688   (set (match_operand:SI 2 "arm_general_register_operand" "")
6689	(mem:SI (match_dup 0)))]
6690  "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6691  [(set (match_dup 2)
6692	(mem:SI (unspec:SI [(match_dup 3)
6693			    (const_int 8)
6694			    (match_dup 1)]
6695			   UNSPEC_PIC_BASE)))]
6696  ""
6697)
6698
6699(define_insn "pic_offset_arm"
6700  [(set (match_operand:SI 0 "register_operand" "=r")
6701	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6702			 (unspec:SI [(match_operand:SI 2 "" "X")]
6703				    UNSPEC_PIC_OFFSET))))]
6704  "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6705  "ldr%?\\t%0, [%1,%2]"
6706  [(set_attr "type" "load_4")]
6707)
6708
6709(define_expand "builtin_setjmp_receiver"
6710  [(label_ref (match_operand 0 "" ""))]
6711  "flag_pic"
6712  "
6713{
6714  /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6715     register.  */
6716  if (arm_pic_register != INVALID_REGNUM)
6717    arm_load_pic_register (1UL << 3, NULL_RTX);
6718  DONE;
6719}")
6720
6721;; If copying one reg to another we can set the condition codes according to
;; its value.  Such a move is common after a return from a subroutine when the
;; result is being tested against zero.
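;; For example, 'mov r0, r1' followed by 'cmp r1, #0' can be collapsed into
;; the single instruction 'subs r0, r1, #0' (or just 'cmp r0, #0' when source
;; and destination are already the same register).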
6724
6725(define_insn "*movsi_compare0"
6726  [(set (reg:CC CC_REGNUM)
6727	(compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
6728		    (const_int 0)))
6729   (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6730	(match_dup 1))]
6731  "TARGET_32BIT"
6732  "@
6733   cmp%?\\t%0, #0
6734   cmp%?\\t%0, #0
6735   subs%?\\t%0, %1, #0
6736   subs%?\\t%0, %1, #0
6737   subs%?\\t%0, %1, #0"
6738  [(set_attr "conds" "set")
6739   (set_attr "arch" "t2,*,t2,t2,a")
6740   (set_attr "type" "alus_imm")
6741   (set_attr "length" "2,4,2,4,4")]
6742)
6743
6744;; Subroutine to store a half word from a register into memory.
6745;; Operand 0 is the source register (HImode)
6746;; Operand 1 is the destination address in a register (SImode)
6747
6748;; In both this routine and the next, we must be careful not to spill
6749;; a memory address of reg+large_const into a separate PLUS insn, since this
6750;; can generate unrecognizable rtl.
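;; For little-endian the expansion below is roughly (a sketch; the value is
;; in r0, the address in r1 and r2 is a fresh temporary):
;;   strb r0, [r1]          @ store the low byte
;;   mov  r2, r0, asr #8    @ extract the high byte
;;   strb r2, [r1, #1]      @ store the high byte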
6751
6752(define_expand "storehi"
6753  [;; store the low byte
6754   (set (match_operand 1 "" "") (match_dup 3))
6755   ;; extract the high byte
6756   (set (match_dup 2)
6757	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6758   ;; store the high byte
6759   (set (match_dup 4) (match_dup 5))]
6760  "TARGET_ARM"
6761  "
6762  {
6763    rtx op1 = operands[1];
6764    rtx addr = XEXP (op1, 0);
6765    enum rtx_code code = GET_CODE (addr);
6766
6767    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6768	|| code == MINUS)
6769      op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6770
6771    operands[4] = adjust_address (op1, QImode, 1);
6772    operands[1] = adjust_address (operands[1], QImode, 0);
6773    operands[3] = gen_lowpart (QImode, operands[0]);
6774    operands[0] = gen_lowpart (SImode, operands[0]);
6775    operands[2] = gen_reg_rtx (SImode);
6776    operands[5] = gen_lowpart (QImode, operands[2]);
6777  }"
6778)
6779
6780(define_expand "storehi_bigend"
6781  [(set (match_dup 4) (match_dup 3))
6782   (set (match_dup 2)
6783	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6784   (set (match_operand 1 "" "")	(match_dup 5))]
6785  "TARGET_ARM"
6786  "
6787  {
6788    rtx op1 = operands[1];
6789    rtx addr = XEXP (op1, 0);
6790    enum rtx_code code = GET_CODE (addr);
6791
6792    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6793	|| code == MINUS)
6794      op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6795
6796    operands[4] = adjust_address (op1, QImode, 1);
6797    operands[1] = adjust_address (operands[1], QImode, 0);
6798    operands[3] = gen_lowpart (QImode, operands[0]);
6799    operands[0] = gen_lowpart (SImode, operands[0]);
6800    operands[2] = gen_reg_rtx (SImode);
6801    operands[5] = gen_lowpart (QImode, operands[2]);
6802  }"
6803)
6804
6805;; Subroutine to store a half word integer constant into memory.
6806(define_expand "storeinthi"
6807  [(set (match_operand 0 "" "")
6808	(match_operand 1 "" ""))
6809   (set (match_dup 3) (match_dup 2))]
6810  "TARGET_ARM"
6811  "
6812  {
6813    HOST_WIDE_INT value = INTVAL (operands[1]);
6814    rtx addr = XEXP (operands[0], 0);
6815    rtx op0 = operands[0];
6816    enum rtx_code code = GET_CODE (addr);
6817
6818    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6819	|| code == MINUS)
6820      op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6821
6822    operands[1] = gen_reg_rtx (SImode);
6823    if (BYTES_BIG_ENDIAN)
6824      {
6825	emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6826	if ((value & 255) == ((value >> 8) & 255))
6827	  operands[2] = operands[1];
6828	else
6829	  {
6830	    operands[2] = gen_reg_rtx (SImode);
6831	    emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6832	  }
6833      }
6834    else
6835      {
6836	emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6837	if ((value & 255) == ((value >> 8) & 255))
6838	  operands[2] = operands[1];
6839	else
6840	  {
6841	    operands[2] = gen_reg_rtx (SImode);
6842	    emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6843	  }
6844      }
6845
6846    operands[3] = adjust_address (op0, QImode, 1);
6847    operands[0] = adjust_address (operands[0], QImode, 0);
6848    operands[2] = gen_lowpart (QImode, operands[2]);
6849    operands[1] = gen_lowpart (QImode, operands[1]);
6850  }"
6851)
6852
6853(define_expand "storehi_single_op"
6854  [(set (match_operand:HI 0 "memory_operand")
6855	(match_operand:HI 1 "general_operand"))]
6856  "TARGET_32BIT && arm_arch4"
6857  "
6858  if (!s_register_operand (operands[1], HImode))
6859    operands[1] = copy_to_mode_reg (HImode, operands[1]);
6860  "
6861)
6862
6863(define_expand "movhi"
6864  [(set (match_operand:HI 0 "general_operand")
6865	(match_operand:HI 1 "general_operand"))]
6866  "TARGET_EITHER"
6867  "
6868  gcc_checking_assert (aligned_operand (operands[0], HImode));
6869  gcc_checking_assert (aligned_operand (operands[1], HImode));
6870  if (TARGET_ARM)
6871    {
6872      if (can_create_pseudo_p ())
6873        {
6874          if (MEM_P (operands[0]))
6875	    {
6876	      if (arm_arch4)
6877	        {
6878	          emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6879	          DONE;
6880	        }
6881	      if (CONST_INT_P (operands[1]))
6882	        emit_insn (gen_storeinthi (operands[0], operands[1]));
6883	      else
6884	        {
6885	          if (MEM_P (operands[1]))
6886		    operands[1] = force_reg (HImode, operands[1]);
6887	          if (BYTES_BIG_ENDIAN)
6888		    emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6889	          else
6890		   emit_insn (gen_storehi (operands[1], operands[0]));
6891	        }
6892	      DONE;
6893	    }
6894          /* Sign extend a constant, and keep it in an SImode reg.  */
6895          else if (CONST_INT_P (operands[1]))
6896	    {
6897	      rtx reg = gen_reg_rtx (SImode);
6898	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6899
6900	      /* If the constant is already valid, leave it alone.  */
6901	      if (!const_ok_for_arm (val))
6902	        {
6903	          /* If setting all the top bits will make the constant
6904		     loadable in a single instruction, then set them.
6905		     Otherwise, sign extend the number.  */
6906
6907	          if (const_ok_for_arm (~(val | ~0xffff)))
6908		    val |= ~0xffff;
6909	          else if (val & 0x8000)
6910		    val |= ~0xffff;
6911	        }
6912
6913	      emit_insn (gen_movsi (reg, GEN_INT (val)));
6914	      operands[1] = gen_lowpart (HImode, reg);
6915	    }
6916	  else if (arm_arch4 && optimize && can_create_pseudo_p ()
6917		   && MEM_P (operands[1]))
6918	    {
6919	      rtx reg = gen_reg_rtx (SImode);
6920
6921	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6922	      operands[1] = gen_lowpart (HImode, reg);
6923	    }
6924          else if (!arm_arch4)
6925	    {
6926	      if (MEM_P (operands[1]))
6927	        {
6928		  rtx base;
6929		  rtx offset = const0_rtx;
6930		  rtx reg = gen_reg_rtx (SImode);
6931
6932		  if ((REG_P (base = XEXP (operands[1], 0))
6933		       || (GET_CODE (base) == PLUS
6934			   && (CONST_INT_P (offset = XEXP (base, 1)))
			   && ((INTVAL (offset) & 1) != 1)
6936			   && REG_P (base = XEXP (base, 0))))
6937		      && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6938		    {
6939		      rtx new_rtx;
6940
6941		      new_rtx = widen_memory_access (operands[1], SImode,
6942						     ((INTVAL (offset) & ~3)
6943						      - INTVAL (offset)));
6944		      emit_insn (gen_movsi (reg, new_rtx));
6945		      if (((INTVAL (offset) & 2) != 0)
6946			  ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6947			{
6948			  rtx reg2 = gen_reg_rtx (SImode);
6949
6950			  emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6951			  reg = reg2;
6952			}
6953		    }
6954		  else
6955		    emit_insn (gen_movhi_bytes (reg, operands[1]));
6956
6957		  operands[1] = gen_lowpart (HImode, reg);
6958	       }
6959	   }
6960        }
6961      /* Handle loading a large integer during reload.  */
6962      else if (CONST_INT_P (operands[1])
6963	       && !const_ok_for_arm (INTVAL (operands[1]))
6964	       && !const_ok_for_arm (~INTVAL (operands[1])))
6965        {
6966          /* Writing a constant to memory needs a scratch, which should
6967	     be handled with SECONDARY_RELOADs.  */
6968          gcc_assert (REG_P (operands[0]));
6969
6970          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6971          emit_insn (gen_movsi (operands[0], operands[1]));
6972          DONE;
6973       }
6974    }
6975  else if (TARGET_THUMB2)
6976    {
6977      /* Thumb-2 can do everything except mem=mem and mem=const easily.  */
6978      if (can_create_pseudo_p ())
6979	{
6980	  if (!REG_P (operands[0]))
6981	    operands[1] = force_reg (HImode, operands[1]);
6982          /* Zero extend a constant, and keep it in an SImode reg.  */
6983          else if (CONST_INT_P (operands[1]))
6984	    {
6985	      rtx reg = gen_reg_rtx (SImode);
6986	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6987
6988	      emit_insn (gen_movsi (reg, GEN_INT (val)));
6989	      operands[1] = gen_lowpart (HImode, reg);
6990	    }
6991	}
6992    }
6993  else /* TARGET_THUMB1 */
6994    {
6995      if (can_create_pseudo_p ())
6996        {
6997	  if (CONST_INT_P (operands[1]))
6998	    {
6999	      rtx reg = gen_reg_rtx (SImode);
7000
7001	      emit_insn (gen_movsi (reg, operands[1]));
7002	      operands[1] = gen_lowpart (HImode, reg);
7003	    }
7004
7005          /* ??? We shouldn't really get invalid addresses here, but this can
7006	     happen if we are passed a SP (never OK for HImode/QImode) or
7007	     virtual register (also rejected as illegitimate for HImode/QImode)
7008	     relative address.  */
7009          /* ??? This should perhaps be fixed elsewhere, for instance, in
7010	     fixup_stack_1, by checking for other kinds of invalid addresses,
7011	     e.g. a bare reference to a virtual register.  This may confuse the
7012	     alpha though, which must handle this case differently.  */
7013          if (MEM_P (operands[0])
7014	      && !memory_address_p (GET_MODE (operands[0]),
7015				    XEXP (operands[0], 0)))
7016	    operands[0]
7017	      = replace_equiv_address (operands[0],
7018				       copy_to_reg (XEXP (operands[0], 0)));
7019
7020          if (MEM_P (operands[1])
7021	      && !memory_address_p (GET_MODE (operands[1]),
7022				    XEXP (operands[1], 0)))
7023	    operands[1]
7024	      = replace_equiv_address (operands[1],
7025				       copy_to_reg (XEXP (operands[1], 0)));
7026
7027	  if (MEM_P (operands[1]) && optimize > 0)
7028	    {
7029	      rtx reg = gen_reg_rtx (SImode);
7030
7031	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7032	      operands[1] = gen_lowpart (HImode, reg);
7033	    }
7034
7035          if (MEM_P (operands[0]))
7036	    operands[1] = force_reg (HImode, operands[1]);
7037        }
7038      else if (CONST_INT_P (operands[1])
7039	        && !satisfies_constraint_I (operands[1]))
7040        {
7041	  /* Handle loading a large integer during reload.  */
7042
7043          /* Writing a constant to memory needs a scratch, which should
7044	     be handled with SECONDARY_RELOADs.  */
7045          gcc_assert (REG_P (operands[0]));
7046
7047          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7048          emit_insn (gen_movsi (operands[0], operands[1]));
7049          DONE;
7050        }
7051    }
7052  "
7053)
7054
7055(define_expand "movhi_bytes"
7056  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7057   (set (match_dup 3)
7058	(zero_extend:SI (match_dup 6)))
7059   (set (match_operand:SI 0 "" "")
7060	 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7061  "TARGET_ARM"
7062  "
7063  {
7064    rtx mem1, mem2;
7065    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7066
7067    mem1 = change_address (operands[1], QImode, addr);
7068    mem2 = change_address (operands[1], QImode,
7069			   plus_constant (Pmode, addr, 1));
7070    operands[0] = gen_lowpart (SImode, operands[0]);
7071    operands[1] = mem1;
7072    operands[2] = gen_reg_rtx (SImode);
7073    operands[3] = gen_reg_rtx (SImode);
7074    operands[6] = mem2;
7075
7076    if (BYTES_BIG_ENDIAN)
7077      {
7078	operands[4] = operands[2];
7079	operands[5] = operands[3];
7080      }
7081    else
7082      {
7083	operands[4] = operands[3];
7084	operands[5] = operands[2];
7085      }
7086  }"
7087)
7088
7089(define_expand "movhi_bigend"
7090  [(set (match_dup 2)
7091	(rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7092		   (const_int 16)))
7093   (set (match_dup 3)
7094	(ashiftrt:SI (match_dup 2) (const_int 16)))
7095   (set (match_operand:HI 0 "s_register_operand")
7096	(match_dup 4))]
7097  "TARGET_ARM"
7098  "
7099  operands[2] = gen_reg_rtx (SImode);
7100  operands[3] = gen_reg_rtx (SImode);
7101  operands[4] = gen_lowpart (HImode, operands[3]);
7102  "
7103)
7104
;; Pattern to recognize the insn generated by the default case above.
7106(define_insn "*movhi_insn_arch4"
7107  [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7108	(match_operand:HI 1 "general_operand"      "rIk,K,n,r,mi"))]
7109  "TARGET_ARM
7110   && arm_arch4 && !TARGET_HARD_FLOAT
7111   && (register_operand (operands[0], HImode)
7112       || register_operand (operands[1], HImode))"
7113  "@
7114   mov%?\\t%0, %1\\t%@ movhi
7115   mvn%?\\t%0, #%B1\\t%@ movhi
7116   movw%?\\t%0, %L1\\t%@ movhi
7117   strh%?\\t%1, %0\\t%@ movhi
7118   ldrh%?\\t%0, %1\\t%@ movhi"
7119  [(set_attr "predicable" "yes")
7120   (set_attr "pool_range" "*,*,*,*,256")
7121   (set_attr "neg_pool_range" "*,*,*,*,244")
7122   (set_attr "arch" "*,*,v6t2,*,*")
7123   (set_attr_alternative "type"
7124                         [(if_then_else (match_operand 1 "const_int_operand" "")
7125                                        (const_string "mov_imm" )
7126                                        (const_string "mov_reg"))
7127                          (const_string "mvn_imm")
7128                          (const_string "mov_imm")
7129                          (const_string "store_4")
7130                          (const_string "load_4")])]
7131)
7132
7133(define_insn "*movhi_bytes"
7134  [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7135	(match_operand:HI 1 "arm_rhs_operand"  "I,rk,K"))]
7136  "TARGET_ARM && !TARGET_HARD_FLOAT"
7137  "@
7138   mov%?\\t%0, %1\\t%@ movhi
7139   mov%?\\t%0, %1\\t%@ movhi
7140   mvn%?\\t%0, #%B1\\t%@ movhi"
7141  [(set_attr "predicable" "yes")
7142   (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7143)
7144
7145;; We use a DImode scratch because we may occasionally need an additional
7146;; temporary if the address isn't offsettable -- push_reload doesn't seem
;; to take any notice of the "o" constraint on the reload_memory_operand operand.
;; The reload_in<m> and reload_out<m> patterns require special constraints
;; to be handled correctly by the default_secondary_reload function.
7150(define_expand "reload_outhi"
7151  [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7152	      (match_operand:HI 1 "s_register_operand"        "r")
7153	      (match_operand:DI 2 "s_register_operand"        "=&l")])]
7154  "TARGET_EITHER"
7155  "if (TARGET_ARM)
7156     arm_reload_out_hi (operands);
7157   else
7158     thumb_reload_out_hi (operands);
7159  DONE;
7160  "
7161)
7162
7163(define_expand "reload_inhi"
7164  [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7165	      (match_operand:HI 1 "arm_reload_memory_operand" "o")
7166	      (match_operand:DI 2 "s_register_operand" "=&r")])]
7167  "TARGET_EITHER"
7168  "
7169  if (TARGET_ARM)
7170    arm_reload_in_hi (operands);
7171  else
7172    thumb_reload_out_hi (operands);
7173  DONE;
7174")
7175
7176(define_expand "movqi"
7177  [(set (match_operand:QI 0 "general_operand")
7178        (match_operand:QI 1 "general_operand"))]
7179  "TARGET_EITHER"
7180  "
  /* Everything except mem = const or mem = mem can be done easily.  */
7182
7183  if (can_create_pseudo_p ())
7184    {
7185      if (CONST_INT_P (operands[1]))
7186	{
7187	  rtx reg = gen_reg_rtx (SImode);
7188
7189	  /* For thumb we want an unsigned immediate, then we are more likely
7190	     to be able to use a movs insn.  */
7191	  if (TARGET_THUMB)
7192	    operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7193
7194	  emit_insn (gen_movsi (reg, operands[1]));
7195	  operands[1] = gen_lowpart (QImode, reg);
7196	}
7197
7198      if (TARGET_THUMB)
7199	{
7200          /* ??? We shouldn't really get invalid addresses here, but this can
7201	     happen if we are passed a SP (never OK for HImode/QImode) or
7202	     virtual register (also rejected as illegitimate for HImode/QImode)
7203	     relative address.  */
7204          /* ??? This should perhaps be fixed elsewhere, for instance, in
7205	     fixup_stack_1, by checking for other kinds of invalid addresses,
7206	     e.g. a bare reference to a virtual register.  This may confuse the
7207	     alpha though, which must handle this case differently.  */
7208          if (MEM_P (operands[0])
7209	      && !memory_address_p (GET_MODE (operands[0]),
7210		  		     XEXP (operands[0], 0)))
7211	    operands[0]
7212	      = replace_equiv_address (operands[0],
7213				       copy_to_reg (XEXP (operands[0], 0)));
7214          if (MEM_P (operands[1])
7215	      && !memory_address_p (GET_MODE (operands[1]),
7216				    XEXP (operands[1], 0)))
7217	     operands[1]
7218	       = replace_equiv_address (operands[1],
7219					copy_to_reg (XEXP (operands[1], 0)));
7220	}
7221
7222      if (MEM_P (operands[1]) && optimize > 0)
7223	{
7224	  rtx reg = gen_reg_rtx (SImode);
7225
7226	  emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7227	  operands[1] = gen_lowpart (QImode, reg);
7228	}
7229
7230      if (MEM_P (operands[0]))
7231	operands[1] = force_reg (QImode, operands[1]);
7232    }
7233  else if (TARGET_THUMB
7234	   && CONST_INT_P (operands[1])
7235	   && !satisfies_constraint_I (operands[1]))
7236    {
7237      /* Handle loading a large integer during reload.  */
7238
7239      /* Writing a constant to memory needs a scratch, which should
7240	 be handled with SECONDARY_RELOADs.  */
7241      gcc_assert (REG_P (operands[0]));
7242
7243      operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7244      emit_insn (gen_movsi (operands[0], operands[1]));
7245      DONE;
7246    }
7247  "
7248)
7249
7250(define_insn "*arm_movqi_insn"
7251  [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7252	(match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7253  "TARGET_32BIT
7254   && (   register_operand (operands[0], QImode)
7255       || register_operand (operands[1], QImode))"
7256  "@
7257   mov%?\\t%0, %1
7258   mov%?\\t%0, %1
7259   mov%?\\t%0, %1
7260   mov%?\\t%0, %1
7261   mvn%?\\t%0, #%B1
7262   ldrb%?\\t%0, %1
7263   strb%?\\t%1, %0
7264   ldrb%?\\t%0, %1
7265   strb%?\\t%1, %0"
7266  [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7267   (set_attr "predicable" "yes")
7268   (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7269   (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7270   (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7271)
7272
7273;; HFmode and BFmode moves.
7274(define_expand "mov<mode>"
7275  [(set (match_operand:HFBF 0 "general_operand")
7276	(match_operand:HFBF 1 "general_operand"))]
7277  "TARGET_EITHER"
7278  "
7279  gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7280  gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
7281  if (TARGET_32BIT)
7282    {
7283      if (MEM_P (operands[0]))
7284	operands[1] = force_reg (<MODE>mode, operands[1]);
7285    }
7286  else /* TARGET_THUMB1 */
7287    {
7288      if (can_create_pseudo_p ())
7289        {
7290           if (!REG_P (operands[0]))
7291	     operands[1] = force_reg (<MODE>mode, operands[1]);
7292        }
7293    }
7294  "
7295)
7296
7297(define_insn "*arm32_mov<mode>"
7298  [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7299	(match_operand:HFBF 1 "general_operand"	   " m,r,r,F"))]
7300  "TARGET_32BIT
7301   && !TARGET_HARD_FLOAT
7302   && !TARGET_HAVE_MVE
7303   && (	  s_register_operand (operands[0], <MODE>mode)
7304       || s_register_operand (operands[1], <MODE>mode))"
7305  "*
7306  switch (which_alternative)
7307    {
7308    case 0:	/* ARM register from memory */
7309      return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7310    case 1:	/* memory from ARM register */
7311      return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7312    case 2:	/* ARM register from ARM register */
7313      return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7314    case 3:	/* ARM register from constant */
7315      {
7316	long bits;
7317	rtx ops[4];
7318
7319	bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7320			       <MODE>mode);
7321	ops[0] = operands[0];
7322	ops[1] = GEN_INT (bits);
7323	ops[2] = GEN_INT (bits & 0xff00);
7324	ops[3] = GEN_INT (bits & 0x00ff);
7325
7326	if (arm_arch_thumb2)
7327	  output_asm_insn (\"movw%?\\t%0, %1\", ops);
7328	else
7329	  output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7330	return \"\";
7331       }
7332    default:
7333      gcc_unreachable ();
7334    }
7335  "
7336  [(set_attr "conds" "unconditional")
7337   (set_attr "type" "load_4,store_4,mov_reg,multiple")
7338   (set_attr "length" "4,4,4,8")
7339   (set_attr "predicable" "yes")]
7340)
7341
7342(define_expand "movsf"
7343  [(set (match_operand:SF 0 "general_operand")
7344	(match_operand:SF 1 "general_operand"))]
7345  "TARGET_EITHER"
7346  "
7347  gcc_checking_assert (aligned_operand (operands[0], SFmode));
7348  gcc_checking_assert (aligned_operand (operands[1], SFmode));
7349  if (TARGET_32BIT)
7350    {
7351      if (MEM_P (operands[0]))
7352        operands[1] = force_reg (SFmode, operands[1]);
7353    }
7354  else /* TARGET_THUMB1 */
7355    {
7356      if (can_create_pseudo_p ())
7357        {
7358           if (!REG_P (operands[0]))
7359	     operands[1] = force_reg (SFmode, operands[1]);
7360        }
7361    }
7362
7363  /* Cannot load it directly, generate a load with clobber so that it can be
7364     loaded via GPR with MOV / MOVT.  */
7365  if (arm_disable_literal_pool
7366      && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7367      && CONST_DOUBLE_P (operands[1])
7368      && TARGET_VFP_BASE
7369      && !vfp3_const_double_rtx (operands[1]))
7370    {
7371      rtx clobreg = gen_reg_rtx (SFmode);
7372      emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7373						   clobreg));
7374      DONE;
7375    }
7376  "
7377)
7378
7379;; Transform a floating-point move of a constant into a core register into
7380;; an SImode operation.
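;; For example, an SFmode constant 1.0 destined for a core register simply
;; becomes an SImode move of its bit pattern 0x3f800000.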
7381(define_split
7382  [(set (match_operand:SF 0 "arm_general_register_operand" "")
7383	(match_operand:SF 1 "immediate_operand" ""))]
7384  "TARGET_EITHER
7385   && reload_completed
7386   && CONST_DOUBLE_P (operands[1])"
7387  [(set (match_dup 2) (match_dup 3))]
7388  "
7389  operands[2] = gen_lowpart (SImode, operands[0]);
7390  operands[3] = gen_lowpart (SImode, operands[1]);
7391  if (operands[2] == 0 || operands[3] == 0)
7392    FAIL;
7393  "
7394)
7395
7396(define_insn "*arm_movsf_soft_insn"
7397  [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7398	(match_operand:SF 1 "general_operand"  "r,mE,r"))]
7399  "TARGET_32BIT
7400   && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7401   && (!MEM_P (operands[0])
7402       || register_operand (operands[1], SFmode))"
7403{
7404  switch (which_alternative)
7405    {
7406    case 0: return \"mov%?\\t%0, %1\";
7407    case 1:
7408      /* Cannot load it directly, split to load it via MOV / MOVT.  */
7409      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7410	return \"#\";
7411      return \"ldr%?\\t%0, %1\\t%@ float\";
7412    case 2: return \"str%?\\t%1, %0\\t%@ float\";
7413    default: gcc_unreachable ();
7414    }
7415}
7416  [(set_attr "predicable" "yes")
7417   (set_attr "type" "mov_reg,load_4,store_4")
7418   (set_attr "arm_pool_range" "*,4096,*")
7419   (set_attr "thumb2_pool_range" "*,4094,*")
7420   (set_attr "arm_neg_pool_range" "*,4084,*")
7421   (set_attr "thumb2_neg_pool_range" "*,0,*")]
7422)
7423
7424;; Splitter for the above.
7425(define_split
7426  [(set (match_operand:SF 0 "s_register_operand")
7427	(match_operand:SF 1 "const_double_operand"))]
7428  "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7429  [(const_int 0)]
7430{
7431  long buf;
7432  real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7433  rtx cst = gen_int_mode (buf, SImode);
7434  emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
7435  DONE;
7436}
7437)
7438
7439(define_expand "movdf"
7440  [(set (match_operand:DF 0 "general_operand")
7441	(match_operand:DF 1 "general_operand"))]
7442  "TARGET_EITHER"
7443  "
7444  gcc_checking_assert (aligned_operand (operands[0], DFmode));
7445  gcc_checking_assert (aligned_operand (operands[1], DFmode));
7446  if (TARGET_32BIT)
7447    {
7448      if (MEM_P (operands[0]))
7449        operands[1] = force_reg (DFmode, operands[1]);
7450    }
7451  else /* TARGET_THUMB */
7452    {
7453      if (can_create_pseudo_p ())
7454        {
7455          if (!REG_P (operands[0]))
7456	    operands[1] = force_reg (DFmode, operands[1]);
7457        }
7458    }
7459
7460  /* Cannot load it directly, generate a load with clobber so that it can be
7461     loaded via GPR with MOV / MOVT.  */
7462  if (arm_disable_literal_pool
7463      && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7464      && CONSTANT_P (operands[1])
7465      && TARGET_VFP_BASE
7466      && !arm_const_double_rtx (operands[1])
7467      && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7468    {
7469      rtx clobreg = gen_reg_rtx (DFmode);
7470      emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7471						   clobreg));
7472      DONE;
7473    }
7474  "
7475)
7476
7477;; Reloading a df mode value stored in integer regs to memory can require a
7478;; scratch reg.
7479;; Another reload_out<m> pattern that requires special constraints.
7480(define_expand "reload_outdf"
7481  [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7482   (match_operand:DF 1 "s_register_operand" "r")
7483   (match_operand:SI 2 "s_register_operand" "=&r")]
7484  "TARGET_THUMB2"
7485  "
7486  {
7487    enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7488
7489    if (code == REG)
7490      operands[2] = XEXP (operands[0], 0);
7491    else if (code == POST_INC || code == PRE_DEC)
7492      {
7493	operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7494	operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7495	emit_insn (gen_movdi (operands[0], operands[1]));
7496	DONE;
7497      }
7498    else if (code == PRE_INC)
7499      {
7500	rtx reg = XEXP (XEXP (operands[0], 0), 0);
7501
7502	emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7503	operands[2] = reg;
7504      }
7505    else if (code == POST_DEC)
7506      operands[2] = XEXP (XEXP (operands[0], 0), 0);
7507    else
7508      emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7509			     XEXP (XEXP (operands[0], 0), 1)));
7510
7511    emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7512			    operands[1]));
7513
7514    if (code == POST_DEC)
7515      emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7516
7517    DONE;
7518  }"
7519)
7520
7521(define_insn "*movdf_soft_insn"
7522  [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7523       (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7524  "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7525   && (   register_operand (operands[0], DFmode)
7526       || register_operand (operands[1], DFmode))"
7527  "*
7528  switch (which_alternative)
7529    {
7530    case 0:
7531    case 1:
7532    case 2:
7533      return \"#\";
7534    case 3:
7535      /* Cannot load it directly, split to load it via MOV / MOVT.  */
7536      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7537	return \"#\";
7538      /* Fall through.  */
7539    default:
7540      return output_move_double (operands, true, NULL);
7541    }
7542  "
7543  [(set_attr "length" "8,12,16,8,8")
7544   (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7545   (set_attr "arm_pool_range" "*,*,*,1020,*")
7546   (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7547   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7548   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7549)
7550
7551;; Splitter for the above.
7552(define_split
7553  [(set (match_operand:DF 0 "s_register_operand")
7554	(match_operand:DF 1 "const_double_operand"))]
7555  "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7556  [(const_int 0)]
7557{
7558  long buf[2];
7559  int order = BYTES_BIG_ENDIAN ? 1 : 0;
7560  real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7561  unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7562  ival |= (zext_hwi (buf[1 - order], 32) << 32);
7563  rtx cst = gen_int_mode (ival, DImode);
7564  emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7565  DONE;
7566}
7567)
7568
7569
7570;; load- and store-multiple insns
;; The ARM can load/store any set of registers, provided that they are in
7572;; ascending order, but these expanders assume a contiguous set.
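;; For example, a load_multiple of three registers starting at r4 from the
;; address held in r0 expands to roughly 'ldm r0, {r4, r5, r6}'.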
7573
7574(define_expand "load_multiple"
7575  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7576                          (match_operand:SI 1 "" ""))
7577                     (use (match_operand:SI 2 "" ""))])]
7578  "TARGET_32BIT"
7579{
7580  HOST_WIDE_INT offset = 0;
7581
7582  /* Support only fixed point registers.  */
7583  if (!CONST_INT_P (operands[2])
7584      || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7585      || INTVAL (operands[2]) < 2
7586      || !MEM_P (operands[1])
7587      || !REG_P (operands[0])
7588      || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7589      || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7590    FAIL;
7591
7592  operands[3]
7593    = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7594			     INTVAL (operands[2]),
7595			     force_reg (SImode, XEXP (operands[1], 0)),
7596			     FALSE, operands[1], &offset);
7597})
7598
7599(define_expand "store_multiple"
7600  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7601                          (match_operand:SI 1 "" ""))
7602                     (use (match_operand:SI 2 "" ""))])]
7603  "TARGET_32BIT"
7604{
7605  HOST_WIDE_INT offset = 0;
7606
7607  /* Support only fixed point registers.  */
7608  if (!CONST_INT_P (operands[2])
7609      || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7610      || INTVAL (operands[2]) < 2
7611      || !REG_P (operands[1])
7612      || !MEM_P (operands[0])
7613      || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7614      || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7615    FAIL;
7616
7617  operands[3]
7618    = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7619			      INTVAL (operands[2]),
7620			      force_reg (SImode, XEXP (operands[0], 0)),
7621			      FALSE, operands[0], &offset);
7622})
7623
7624
7625(define_expand "setmemsi"
7626  [(match_operand:BLK 0 "general_operand")
7627   (match_operand:SI 1 "const_int_operand")
7628   (match_operand:SI 2 "const_int_operand")
7629   (match_operand:SI 3 "const_int_operand")]
7630  "TARGET_32BIT"
7631{
7632  if (arm_gen_setmem (operands))
7633    DONE;
7634
7635  FAIL;
7636})
7637
7638
7639;; Move a block of memory if it is word aligned and MORE than 2 words long.
7640;; We could let this apply for blocks of less than this, but it clobbers so
7641;; many registers that there is then probably a better way.
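;; For example (illustrative only), copying 16 word-aligned bytes typically
;; ends up as something like 'ldmia r1!, {r4-r7}' followed by
;; 'stmia r0!, {r4-r7}'; the exact register choice is up to arm_gen_cpymemqi.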
7642
7643(define_expand "cpymemqi"
7644  [(match_operand:BLK 0 "general_operand")
7645   (match_operand:BLK 1 "general_operand")
7646   (match_operand:SI 2 "const_int_operand")
7647   (match_operand:SI 3 "const_int_operand")]
7648  ""
7649  "
7650  if (TARGET_32BIT)
7651    {
7652      if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7653          && !optimize_function_for_size_p (cfun))
7654        {
7655          if (gen_cpymem_ldrd_strd (operands))
7656            DONE;
7657          FAIL;
7658        }
7659
7660      if (arm_gen_cpymemqi (operands))
7661        DONE;
7662      FAIL;
7663    }
7664  else /* TARGET_THUMB1 */
7665    {
7666      if (   INTVAL (operands[3]) != 4
7667          || INTVAL (operands[2]) > 48)
7668        FAIL;
7669
7670      thumb_expand_cpymemqi (operands);
7671      DONE;
7672    }
7673  "
7674)
7675
7676
7677;; Compare & branch insns
7678;; The range calculations are based as follows:
7679;; For forward branches, the address calculation returns the address of
7680;; the next instruction.  This is 2 beyond the branch instruction.
7681;; For backward branches, the address calculation returns the address of
7682;; the first instruction in this pattern (cmp).  This is 2 before the branch
7683;; instruction for the shortest sequence, and 4 before the branch instruction
7684;; if we have to jump around an unconditional branch.
7685;; To the basic branch range the PC offset must be added (this is +4).
7686;; So for forward branches we have
7687;;   (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7688;; And for backward branches we have
7689;;   (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7690;;
7691;; In 16-bit Thumb these ranges are:
;; For a 'b'       pos_range = 2046, neg_range = -2048 giving (-2040 -> 2048).
;; For a 'b<cond>' pos_range = 254,  neg_range = -256  giving (-250 -> 256).
7694
7695;; In 32-bit Thumb these ranges are:
7696;; For a 'b'       +/- 16MB is not checked for.
7697;; For a 'b<cond>' pos_range = 1048574,  neg_range = -1048576  giving
7698;; (-1048568 -> 1048576).
7699
7700(define_expand "cbranchsi4"
7701  [(set (pc) (if_then_else
7702	      (match_operator 0 "expandable_comparison_operator"
7703	       [(match_operand:SI 1 "s_register_operand")
7704	        (match_operand:SI 2 "nonmemory_operand")])
7705	      (label_ref (match_operand 3 "" ""))
7706	      (pc)))]
7707  "TARGET_EITHER"
7708  "
7709  if (!TARGET_THUMB1)
7710    {
7711      if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7712        FAIL;
7713      emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7714				      operands[3]));
7715      DONE;
7716    }
7717  if (thumb1_cmpneg_operand (operands[2], SImode))
7718    {
7719      emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7720					      operands[3], operands[0]));
7721      DONE;
7722    }
7723  if (!thumb1_cmp_operand (operands[2], SImode))
7724    operands[2] = force_reg (SImode, operands[2]);
7725  ")
7726
7727(define_expand "cbranchsf4"
7728  [(set (pc) (if_then_else
7729	      (match_operator 0 "expandable_comparison_operator"
7730	       [(match_operand:SF 1 "s_register_operand")
7731	        (match_operand:SF 2 "vfp_compare_operand")])
7732	      (label_ref (match_operand 3 "" ""))
7733	      (pc)))]
7734  "TARGET_32BIT && TARGET_HARD_FLOAT"
7735  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7736				   operands[3])); DONE;"
7737)
7738
7739(define_expand "cbranchdf4"
7740  [(set (pc) (if_then_else
7741	      (match_operator 0 "expandable_comparison_operator"
7742	       [(match_operand:DF 1 "s_register_operand")
7743	        (match_operand:DF 2 "vfp_compare_operand")])
7744	      (label_ref (match_operand 3 "" ""))
7745	      (pc)))]
7746  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7747  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7748				   operands[3])); DONE;"
7749)
7750
7751(define_expand "cbranchdi4"
7752  [(set (pc) (if_then_else
7753	      (match_operator 0 "expandable_comparison_operator"
7754	       [(match_operand:DI 1 "s_register_operand")
7755	        (match_operand:DI 2 "reg_or_int_operand")])
7756	      (label_ref (match_operand 3 "" ""))
7757	      (pc)))]
7758  "TARGET_32BIT"
7759  "{
7760     if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7761       FAIL;
7762     emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7763				       operands[3]));
7764     DONE;
7765   }"
7766)
7767
7768;; Comparison and test insns
7769
7770(define_insn "*arm_cmpsi_insn"
7771  [(set (reg:CC CC_REGNUM)
7772	(compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7773		    (match_operand:SI 1 "arm_add_operand"    "Py,r,r,I,L")))]
7774  "TARGET_32BIT"
7775  "@
7776   cmp%?\\t%0, %1
7777   cmp%?\\t%0, %1
7778   cmp%?\\t%0, %1
7779   cmp%?\\t%0, %1
7780   cmn%?\\t%0, #%n1"
7781  [(set_attr "conds" "set")
7782   (set_attr "arch" "t2,t2,any,any,any")
7783   (set_attr "length" "2,2,4,4,4")
7784   (set_attr "predicable" "yes")
7785   (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7786   (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7787)
7788
7789(define_insn "*cmpsi_shiftsi"
7790  [(set (reg:CC CC_REGNUM)
7791	(compare:CC (match_operand:SI   0 "s_register_operand" "r,r")
7792		    (match_operator:SI  3 "shift_operator"
7793		     [(match_operand:SI 1 "s_register_operand" "r,r")
7794		      (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7795  "TARGET_32BIT"
7796  "cmp\\t%0, %1%S3"
7797  [(set_attr "conds" "set")
7798   (set_attr "shift" "1")
7799   (set_attr "arch" "32,a")
7800   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7801
7802(define_insn "*cmpsi_shiftsi_swp"
7803  [(set (reg:CC_SWP CC_REGNUM)
7804	(compare:CC_SWP (match_operator:SI 3 "shift_operator"
7805			 [(match_operand:SI 1 "s_register_operand" "r,r")
7806			  (match_operand:SI 2 "shift_amount_operand" "M,r")])
7807			(match_operand:SI 0 "s_register_operand" "r,r")))]
7808  "TARGET_32BIT"
7809  "cmp%?\\t%0, %1%S3"
7810  [(set_attr "conds" "set")
7811   (set_attr "shift" "1")
7812   (set_attr "arch" "32,a")
7813   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7814
7815(define_insn "*arm_cmpsi_negshiftsi_si"
7816  [(set (reg:CC_Z CC_REGNUM)
7817	(compare:CC_Z
7818	 (neg:SI (match_operator:SI 1 "shift_operator"
7819		    [(match_operand:SI 2 "s_register_operand" "r,r")
7820		     (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7821	 (match_operand:SI 0 "s_register_operand" "r,r")))]
7822  "TARGET_32BIT"
7823  "cmn%?\\t%0, %2%S1"
7824  [(set_attr "conds" "set")
7825   (set_attr "arch" "32,a")
7826   (set_attr "shift" "2")
7827   (set_attr "type" "alus_shift_imm,alus_shift_reg")
7828   (set_attr "predicable" "yes")]
7829)
7830
7831; This insn allows redundant compares to be removed by cse, nothing should
7832; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7833; is deleted later on. The match_dup will match the mode here, so that
7834; mode changes of the condition codes aren't lost by this even though we don't
7835; specify what they are.
7836
7837(define_insn "*deleted_compare"
7838  [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7839  "TARGET_32BIT"
7840  "\\t%@ deleted compare"
7841  [(set_attr "conds" "set")
7842   (set_attr "length" "0")
7843   (set_attr "type" "no_insn")]
7844)
7845
7846
7847;; Conditional branch insns
7848
7849(define_expand "cbranch_cc"
7850  [(set (pc)
7851	(if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7852					    (match_operand 2 "" "")])
7853		      (label_ref (match_operand 3 "" ""))
7854		      (pc)))]
7855  "TARGET_32BIT"
7856  "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7857				      operands[1], operands[2], NULL_RTX);
7858   operands[2] = const0_rtx;"
7859)
7860
7861;;
7862;; Patterns to match conditional branch insns.
7863;;
7864
7865(define_insn "arm_cond_branch"
7866  [(set (pc)
7867	(if_then_else (match_operator 1 "arm_comparison_operator"
7868		       [(match_operand 2 "cc_register" "") (const_int 0)])
7869		      (label_ref (match_operand 0 "" ""))
7870		      (pc)))]
7871  "TARGET_32BIT"
7872  {
7873    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7874    {
7875      arm_ccfsm_state += 2;
7876      return "";
7877    }
7878    switch (get_attr_length (insn))
7879      {
7880	case 2: /* Thumb2 16-bit b{cond}.  */
7881	case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}.  */
7882	  return "b%d1\t%l0";
7883	  break;
7884
7885	/* Thumb2 b{cond} out of range.  Use 16-bit b{cond} and
7886	   unconditional branch b.  */
7887	default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7888      }
7889  }
7890  [(set_attr "conds" "use")
7891   (set_attr "type" "branch")
7892   (set (attr "length")
7893    (if_then_else (match_test "!TARGET_THUMB2")
7894
      ;; Target is not Thumb2, and is therefore A32.  Generate b{cond}.
7896      (const_int 4)
7897
7898      ;; Check if target is within 16-bit Thumb2 b{cond} range.
7899      (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7900		         (le (minus (match_dup 0) (pc)) (const_int 256)))
7901
7902	;; Target is Thumb2, within narrow range.
7903	;; Generate b{cond}.
7904	(const_int 2)
7905
7906	;; Check if target is within 32-bit Thumb2 b{cond} range.
7907	(if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7908			   (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7909
7910	  ;; Target is Thumb2, within wide range.
7911	  ;; Generate b{cond}
7912	  (const_int 4)
7913	  ;; Target is Thumb2, out of range.
7914	  ;; Generate narrow b{cond} and unconditional branch b.
7915	  (const_int 6)))))]
7916)
7917
7918(define_insn "*arm_cond_branch_reversed"
7919  [(set (pc)
7920	(if_then_else (match_operator 1 "arm_comparison_operator"
7921		       [(match_operand 2 "cc_register" "") (const_int 0)])
7922		      (pc)
7923		      (label_ref (match_operand 0 "" ""))))]
7924  "TARGET_32BIT"
7925  {
7926    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7927    {
7928      arm_ccfsm_state += 2;
7929      return "";
7930    }
7931    switch (get_attr_length (insn))
7932      {
7933	case 2: /* Thumb2 16-bit b{cond}.  */
7934	case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}.  */
7935	  return "b%D1\t%l0";
7936	  break;
7937
7938	/* Thumb2 b{cond} out of range.  Use 16-bit b{cond} and
7939	   unconditional branch b.  */
7940	default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
7941      }
7942  }
7943  [(set_attr "conds" "use")
7944   (set_attr "type" "branch")
7945   (set (attr "length")
7946    (if_then_else (match_test "!TARGET_THUMB2")
7947
      ;; Target is not Thumb2, so it must be A32.  Generate b{cond}.
7949      (const_int 4)
7950
7951      ;; Check if target is within 16-bit Thumb2 b{cond} range.
7952      (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7953			 (le (minus (match_dup 0) (pc)) (const_int 256)))
7954
7955	;; Target is Thumb2, within narrow range.
7956	;; Generate b{cond}.
7957	(const_int 2)
7958
7959	;; Check if target is within 32-bit Thumb2 b{cond} range.
7960	(if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7961			   (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7962
7963	  ;; Target is Thumb2, within wide range.
7964	  ;; Generate b{cond}.
7965	  (const_int 4)
7966	  ;; Target is Thumb2, out of range.
7967	  ;; Generate narrow b{cond} and unconditional branch b.
7968	  (const_int 6)))))]
7969)
7970
7971
7972
7973; scc insns
7974
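; For illustration only: a comparison used for its value, e.g.
;
;   int is_ge (int a, int b) { return a >= b; }
;
; is expanded via cstoresi4/cstore_cc into a compare that sets the condition
; codes followed by a conditional 0/1 move into the result register --
; roughly "cmp r0, r1; movlt r0, #0; movge r0, #1" on A32 (see *mov_scc
; below); the registers shown are only illustrative.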
7975(define_expand "cstore_cc"
7976  [(set (match_operand:SI 0 "s_register_operand")
7977	(match_operator:SI 1 "" [(match_operand 2 "" "")
7978				 (match_operand 3 "" "")]))]
7979  "TARGET_32BIT"
7980  "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7981				      operands[2], operands[3], NULL_RTX);
7982   operands[3] = const0_rtx;"
7983)
7984
7985(define_insn_and_split "*mov_scc"
7986  [(set (match_operand:SI 0 "s_register_operand" "=r")
7987	(match_operator:SI 1 "arm_comparison_operator_mode"
7988	 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7989  "TARGET_ARM"
7990  "#"   ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7991  "TARGET_ARM"
7992  [(set (match_dup 0)
7993        (if_then_else:SI (match_dup 1)
7994                         (const_int 1)
7995                         (const_int 0)))]
7996  ""
7997  [(set_attr "conds" "use")
7998   (set_attr "length" "8")
7999   (set_attr "type" "multiple")]
8000)
8001
8002(define_insn "*negscc_borrow"
8003  [(set (match_operand:SI 0 "s_register_operand" "=r")
8004	(neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
8005  "TARGET_32BIT"
8006  "sbc\\t%0, %0, %0"
8007  [(set_attr "conds" "use")
8008   (set_attr "length" "4")
8009   (set_attr "type" "adc_reg")]
8010)
8011
8012(define_insn_and_split "*mov_negscc"
8013  [(set (match_operand:SI 0 "s_register_operand" "=r")
8014	(neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8015		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8016  "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8017  "#"   ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8018  "&& true"
8019  [(set (match_dup 0)
8020        (if_then_else:SI (match_dup 1)
8021                         (match_dup 3)
8022                         (const_int 0)))]
8023  {
8024    operands[3] = GEN_INT (~0);
8025  }
8026  [(set_attr "conds" "use")
8027   (set_attr "length" "8")
8028   (set_attr "type" "multiple")]
8029)
8030
8031(define_insn_and_split "*mov_notscc"
8032  [(set (match_operand:SI 0 "s_register_operand" "=r")
8033	(not:SI (match_operator:SI 1 "arm_comparison_operator"
8034		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8035  "TARGET_ARM"
8036  "#"   ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8037  "TARGET_ARM"
8038  [(set (match_dup 0)
8039        (if_then_else:SI (match_dup 1)
8040                         (match_dup 3)
8041                         (match_dup 4)))]
8042  {
8043    operands[3] = GEN_INT (~1);
8044    operands[4] = GEN_INT (~0);
8045  }
8046  [(set_attr "conds" "use")
8047   (set_attr "length" "8")
8048   (set_attr "type" "multiple")]
8049)
8050
8051(define_expand "cstoresi4"
8052  [(set (match_operand:SI 0 "s_register_operand")
8053	(match_operator:SI 1 "expandable_comparison_operator"
8054	 [(match_operand:SI 2 "s_register_operand")
8055	  (match_operand:SI 3 "reg_or_int_operand")]))]
8056  "TARGET_32BIT || TARGET_THUMB1"
8057  "{
8058  rtx op3, scratch, scratch2;
8059
8060  if (!TARGET_THUMB1)
8061    {
8062      if (!arm_add_operand (operands[3], SImode))
8063	operands[3] = force_reg (SImode, operands[3]);
8064      emit_insn (gen_cstore_cc (operands[0], operands[1],
8065				operands[2], operands[3]));
8066      DONE;
8067    }
8068
8069  if (operands[3] == const0_rtx)
8070    {
8071      switch (GET_CODE (operands[1]))
8072	{
8073	case EQ:
8074	  emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8075	  break;
8076
8077	case NE:
8078	  emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8079	  break;
8080
8081	case LE:
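          /* x <= 0 iff the sign bit of x | (x - 1) is set: for x > 0 both
	     terms are non-negative, while for x <= 0 at least one of them
	     is negative.  Compute that IOR and shift its sign bit down to
	     bit 0.  */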
8082          scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8083				  NULL_RTX, 0, OPTAB_WIDEN);
8084          scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8085				  NULL_RTX, 0, OPTAB_WIDEN);
8086          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8087			operands[0], 1, OPTAB_WIDEN);
8088	  break;
8089
8090        case GE:
8091          scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8092				 NULL_RTX, 1);
          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
			operands[0], 1, OPTAB_WIDEN);
8095          break;
8096
8097        case GT:
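          /* x > 0 iff (x >> 31) - x is negative: for x > 0 this is -x < 0;
	     for x == 0 it is 0; and for x < 0 it is -1 - x = -(x + 1) >= 0.
	     So shift the sign bit of that difference down to bit 0.  */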
8098          scratch = expand_binop (SImode, ashr_optab, operands[2],
8099				  GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8100          scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8101				  NULL_RTX, 0, OPTAB_WIDEN);
8102          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8103			0, OPTAB_WIDEN);
8104          break;
8105
8106	/* LT is handled by generic code.  No need for unsigned with 0.  */
8107	default:
8108	  FAIL;
8109	}
8110      DONE;
8111    }
8112
8113  switch (GET_CODE (operands[1]))
8114    {
8115    case EQ:
8116      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8117			      NULL_RTX, 0, OPTAB_WIDEN);
8118      emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8119      break;
8120
8121    case NE:
8122      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8123			      NULL_RTX, 0, OPTAB_WIDEN);
8124      emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8125      break;
8126
8127    case LE:
8128      op3 = force_reg (SImode, operands[3]);
8129
8130      scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8131			      NULL_RTX, 1, OPTAB_WIDEN);
8132      scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8133			      NULL_RTX, 0, OPTAB_WIDEN);
8134      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8135					  op3, operands[2]));
8136      break;
8137
8138    case GE:
8139      op3 = operands[3];
8140      if (!thumb1_cmp_operand (op3, SImode))
8141        op3 = force_reg (SImode, op3);
8142      scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8143			      NULL_RTX, 0, OPTAB_WIDEN);
8144      scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8145			       NULL_RTX, 1, OPTAB_WIDEN);
8146      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8147					  operands[2], op3));
8148      break;
8149
8150    case LEU:
8151      op3 = force_reg (SImode, operands[3]);
8152      scratch = force_reg (SImode, const0_rtx);
8153      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8154					  op3, operands[2]));
8155      break;
8156
8157    case GEU:
8158      op3 = operands[3];
8159      if (!thumb1_cmp_operand (op3, SImode))
8160        op3 = force_reg (SImode, op3);
8161      scratch = force_reg (SImode, const0_rtx);
8162      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8163					  operands[2], op3));
8164      break;
8165
8166    case LTU:
8167      op3 = operands[3];
8168      if (!thumb1_cmp_operand (op3, SImode))
8169        op3 = force_reg (SImode, op3);
8170      scratch = gen_reg_rtx (SImode);
8171      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8172      break;
8173
8174    case GTU:
8175      op3 = force_reg (SImode, operands[3]);
8176      scratch = gen_reg_rtx (SImode);
8177      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8178      break;
8179
8180    /* No good sequences for GT, LT.  */
8181    default:
8182      FAIL;
8183    }
8184  DONE;
8185}")
8186
8187(define_expand "cstorehf4"
8188  [(set (match_operand:SI 0 "s_register_operand")
8189	(match_operator:SI 1 "expandable_comparison_operator"
8190	 [(match_operand:HF 2 "s_register_operand")
8191	  (match_operand:HF 3 "vfp_compare_operand")]))]
8192  "TARGET_VFP_FP16INST"
8193  {
8194    if (!arm_validize_comparison (&operands[1],
8195				  &operands[2],
8196				  &operands[3]))
8197       FAIL;
8198
8199    emit_insn (gen_cstore_cc (operands[0], operands[1],
8200			      operands[2], operands[3]));
8201    DONE;
8202  }
8203)
8204
8205(define_expand "cstoresf4"
8206  [(set (match_operand:SI 0 "s_register_operand")
8207	(match_operator:SI 1 "expandable_comparison_operator"
8208	 [(match_operand:SF 2 "s_register_operand")
8209	  (match_operand:SF 3 "vfp_compare_operand")]))]
8210  "TARGET_32BIT && TARGET_HARD_FLOAT"
8211  "emit_insn (gen_cstore_cc (operands[0], operands[1],
8212			     operands[2], operands[3])); DONE;"
8213)
8214
8215(define_expand "cstoredf4"
8216  [(set (match_operand:SI 0 "s_register_operand")
8217	(match_operator:SI 1 "expandable_comparison_operator"
8218	 [(match_operand:DF 2 "s_register_operand")
8219	  (match_operand:DF 3 "vfp_compare_operand")]))]
8220  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8221  "emit_insn (gen_cstore_cc (operands[0], operands[1],
8222			     operands[2], operands[3])); DONE;"
8223)
8224
8225(define_expand "cstoredi4"
8226  [(set (match_operand:SI 0 "s_register_operand")
8227	(match_operator:SI 1 "expandable_comparison_operator"
8228	 [(match_operand:DI 2 "s_register_operand")
8229	  (match_operand:DI 3 "reg_or_int_operand")]))]
8230  "TARGET_32BIT"
8231  "{
8232     if (!arm_validize_comparison (&operands[1],
8233     				   &operands[2],
8234				   &operands[3]))
8235       FAIL;
8236     emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8237		      	         operands[3]));
8238     DONE;
8239   }"
8240)
8241
8242
8243;; Conditional move insns
8244
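;; For illustration only: a conditional expression such as
;;
;;   int sel (int c, int a, int b) { return c ? a : b; }
;;
;; is expanded through movsicc into a compare followed by a pair of
;; conditionally executed moves -- roughly "cmp r0, #0; movne r0, r1;
;; moveq r0, r2" on A32 (see *movsicc_insn below); the register choice
;; here is only a sketch.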
8245(define_expand "movsicc"
8246  [(set (match_operand:SI 0 "s_register_operand")
8247	(if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8248			 (match_operand:SI 2 "arm_not_operand")
8249			 (match_operand:SI 3 "arm_not_operand")))]
8250  "TARGET_32BIT"
8251  "
8252  {
8253    enum rtx_code code;
8254    rtx ccreg;
8255
8256    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8257				  &XEXP (operands[1], 1)))
8258      FAIL;
8259
8260    code = GET_CODE (operands[1]);
8261    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8262				 XEXP (operands[1], 1), NULL_RTX);
8263    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8264  }"
8265)
8266
8267(define_expand "movhfcc"
8268  [(set (match_operand:HF 0 "s_register_operand")
8269	(if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8270			 (match_operand:HF 2 "s_register_operand")
8271			 (match_operand:HF 3 "s_register_operand")))]
8272  "TARGET_VFP_FP16INST"
8273  "
8274  {
8275    enum rtx_code code = GET_CODE (operands[1]);
8276    rtx ccreg;
8277
8278    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8279				  &XEXP (operands[1], 1)))
8280      FAIL;
8281
8282    code = GET_CODE (operands[1]);
8283    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8284				 XEXP (operands[1], 1), NULL_RTX);
8285    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8286  }"
8287)
8288
8289(define_expand "movsfcc"
8290  [(set (match_operand:SF 0 "s_register_operand")
8291	(if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8292			 (match_operand:SF 2 "s_register_operand")
8293			 (match_operand:SF 3 "s_register_operand")))]
8294  "TARGET_32BIT && TARGET_HARD_FLOAT"
8295  "
8296  {
8297    enum rtx_code code = GET_CODE (operands[1]);
8298    rtx ccreg;
8299
8300    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8301       				  &XEXP (operands[1], 1)))
8302       FAIL;
8303
8304    code = GET_CODE (operands[1]);
8305    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8306				 XEXP (operands[1], 1), NULL_RTX);
8307    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8308  }"
8309)
8310
8311(define_expand "movdfcc"
8312  [(set (match_operand:DF 0 "s_register_operand")
8313	(if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8314			 (match_operand:DF 2 "s_register_operand")
8315			 (match_operand:DF 3 "s_register_operand")))]
8316  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8317  "
8318  {
8319    enum rtx_code code = GET_CODE (operands[1]);
8320    rtx ccreg;
8321
8322    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8323       				  &XEXP (operands[1], 1)))
8324       FAIL;
8325    code = GET_CODE (operands[1]);
8326    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8327				 XEXP (operands[1], 1), NULL_RTX);
8328    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8329  }"
8330)
8331
8332(define_insn "*cmov<mode>"
8333    [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8334	(if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8335			  [(match_operand 2 "cc_register" "") (const_int 0)])
8336			  (match_operand:SDF 3 "s_register_operand"
8337			                      "<F_constraint>")
8338			  (match_operand:SDF 4 "s_register_operand"
8339			                      "<F_constraint>")))]
8340  "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8341  "*
8342  {
8343    enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8344    switch (code)
8345      {
8346      case ARM_GE:
8347      case ARM_GT:
8348      case ARM_EQ:
8349      case ARM_VS:
8350        return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8351      case ARM_LT:
8352      case ARM_LE:
8353      case ARM_NE:
8354      case ARM_VC:
8355        return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8356      default:
8357        gcc_unreachable ();
8358      }
8359    return \"\";
8360  }"
8361  [(set_attr "conds" "use")
8362   (set_attr "type" "fcsel")]
8363)
8364
8365(define_insn "*cmovhf"
8366    [(set (match_operand:HF 0 "s_register_operand" "=t")
8367	(if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8368			 [(match_operand 2 "cc_register" "") (const_int 0)])
8369			  (match_operand:HF 3 "s_register_operand" "t")
8370			  (match_operand:HF 4 "s_register_operand" "t")))]
8371  "TARGET_VFP_FP16INST"
8372  "*
8373  {
8374    enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8375    switch (code)
8376      {
8377      case ARM_GE:
8378      case ARM_GT:
8379      case ARM_EQ:
8380      case ARM_VS:
8381	return \"vsel%d1.f16\\t%0, %3, %4\";
8382      case ARM_LT:
8383      case ARM_LE:
8384      case ARM_NE:
8385      case ARM_VC:
8386	return \"vsel%D1.f16\\t%0, %4, %3\";
8387      default:
8388	gcc_unreachable ();
8389      }
8390    return \"\";
8391  }"
8392  [(set_attr "conds" "use")
8393   (set_attr "type" "fcsel")]
8394)
8395
8396(define_insn_and_split "*movsicc_insn"
8397  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8398	(if_then_else:SI
8399	 (match_operator 3 "arm_comparison_operator"
8400	  [(match_operand 4 "cc_register" "") (const_int 0)])
8401	 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8402	 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8403  "TARGET_ARM"
8404  "@
8405   mov%D3\\t%0, %2
8406   mvn%D3\\t%0, #%B2
8407   mov%d3\\t%0, %1
8408   mvn%d3\\t%0, #%B1
8409   #
8410   #
8411   #
8412   #"
8413   ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8414   ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8415   ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8416   ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8417  "&& reload_completed"
8418  [(const_int 0)]
8419  {
8420    enum rtx_code rev_code;
8421    machine_mode mode;
8422    rtx rev_cond;
8423
8424    emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8425                                  operands[3],
8426                                  gen_rtx_SET (operands[0], operands[1])));
8427
8428    rev_code = GET_CODE (operands[3]);
8429    mode = GET_MODE (operands[4]);
8430    if (mode == CCFPmode || mode == CCFPEmode)
8431      rev_code = reverse_condition_maybe_unordered (rev_code);
8432    else
8433      rev_code = reverse_condition (rev_code);
8434
8435    rev_cond = gen_rtx_fmt_ee (rev_code,
8436                               VOIDmode,
8437                               operands[4],
8438                               const0_rtx);
8439    emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8440                                  rev_cond,
8441                                  gen_rtx_SET (operands[0], operands[2])));
8442    DONE;
8443  }
8444  [(set_attr "length" "4,4,4,4,8,8,8,8")
8445   (set_attr "conds" "use")
8446   (set_attr_alternative "type"
8447                         [(if_then_else (match_operand 2 "const_int_operand" "")
8448                                        (const_string "mov_imm")
8449                                        (const_string "mov_reg"))
8450                          (const_string "mvn_imm")
8451                          (if_then_else (match_operand 1 "const_int_operand" "")
8452                                        (const_string "mov_imm")
8453                                        (const_string "mov_reg"))
8454                          (const_string "mvn_imm")
8455                          (const_string "multiple")
8456                          (const_string "multiple")
8457                          (const_string "multiple")
8458                          (const_string "multiple")])]
8459)
8460
8461(define_insn "*movsfcc_soft_insn"
8462  [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8463	(if_then_else:SF (match_operator 3 "arm_comparison_operator"
8464			  [(match_operand 4 "cc_register" "") (const_int 0)])
8465			 (match_operand:SF 1 "s_register_operand" "0,r")
8466			 (match_operand:SF 2 "s_register_operand" "r,0")))]
8467  "TARGET_ARM && TARGET_SOFT_FLOAT"
8468  "@
8469   mov%D3\\t%0, %2
8470   mov%d3\\t%0, %1"
8471  [(set_attr "conds" "use")
8472   (set_attr "type" "mov_reg")]
8473)
8474
8475
8476;; Jump and linkage insns
8477
8478(define_expand "jump"
8479  [(set (pc)
8480	(label_ref (match_operand 0 "" "")))]
8481  "TARGET_EITHER"
8482  ""
8483)
8484
8485(define_insn "*arm_jump"
8486  [(set (pc)
8487	(label_ref (match_operand 0 "" "")))]
8488  "TARGET_32BIT"
8489  "*
8490  {
8491    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8492      {
8493        arm_ccfsm_state += 2;
8494        return \"\";
8495      }
8496    return \"b%?\\t%l0\";
8497  }
8498  "
8499  [(set_attr "predicable" "yes")
8500   (set (attr "length")
8501	(if_then_else
8502	   (and (match_test "TARGET_THUMB2")
8503		(and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8504		     (le (minus (match_dup 0) (pc)) (const_int 2048))))
8505	   (const_int 2)
8506	   (const_int 4)))
8507   (set_attr "type" "branch")]
8508)
8509
8510(define_expand "call"
8511  [(parallel [(call (match_operand 0 "memory_operand")
8512	            (match_operand 1 "general_operand"))
8513	      (use (match_operand 2 "" ""))
8514	      (clobber (reg:SI LR_REGNUM))])]
8515  "TARGET_EITHER"
8516  "
8517  {
8518    rtx callee, pat;
8519    tree addr = MEM_EXPR (operands[0]);
8520
8521    /* In an untyped call, we can get NULL for operand 2.  */
8522    if (operands[2] == NULL_RTX)
8523      operands[2] = const0_rtx;
8524
8525    /* Decide if we should generate indirect calls by loading the
8526       32-bit address of the callee into a register before performing the
8527       branch and link.  */
8528    callee = XEXP (operands[0], 0);
8529    if (GET_CODE (callee) == SYMBOL_REF
8530	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8531	: !REG_P (callee))
8532      XEXP (operands[0], 0) = force_reg (Pmode, callee);
8533
8534    if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8535	/* Indirect call: set r9 with FDPIC value of callee.  */
8536	XEXP (operands[0], 0)
8537	  = arm_load_function_descriptor (XEXP (operands[0], 0));
8538
8539    if (detect_cmse_nonsecure_call (addr))
8540      {
8541	pat = gen_nonsecure_call_internal (operands[0], operands[1],
8542					   operands[2]);
8543	emit_call_insn (pat);
8544      }
8545    else
8546      {
8547	pat = gen_call_internal (operands[0], operands[1], operands[2]);
8548	arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8549      }
8550
8551    /* Restore FDPIC register (r9) after call.  */
8552    if (TARGET_FDPIC)
8553      {
8554	rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8555	rtx initial_fdpic_reg
8556	    = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8557
8558	emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8559							initial_fdpic_reg));
8560      }
8561
8562    DONE;
8563  }"
8564)
8565
8566(define_insn "restore_pic_register_after_call"
8567  [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8568        (unspec:SI [(match_dup 0)
8569                    (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8570                   UNSPEC_PIC_RESTORE))]
8571  ""
8572  "@
8573  mov\t%0, %1
8574  ldr\t%0, %1"
8575)
8576
8577(define_expand "call_internal"
8578  [(parallel [(call (match_operand 0 "memory_operand")
8579	            (match_operand 1 "general_operand"))
8580	      (use (match_operand 2 "" ""))
8581	      (clobber (reg:SI LR_REGNUM))])])
8582
8583(define_expand "nonsecure_call_internal"
8584  [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8585			       UNSPEC_NONSECURE_MEM)
8586		    (match_operand 1 "general_operand"))
8587	      (use (match_operand 2 "" ""))
8588	      (clobber (reg:SI LR_REGNUM))])]
8589  "use_cmse"
8590  {
8591    rtx addr = XEXP (operands[0], 0);
8592    rtx tmp = REG_P (addr) ? addr : force_reg (SImode, addr);
8593
8594    if (!TARGET_HAVE_FPCXT_CMSE)
8595      {
8596	rtx r4 = gen_rtx_REG (SImode, R4_REGNUM);
8597	emit_move_insn (r4, tmp);
8598	tmp = r4;
8599      }
8600
8601    if (tmp != addr)
8602      operands[0] = replace_equiv_address (operands[0], tmp);
8603  }
8604)
8605
8606(define_insn "*call_reg_armv5"
8607  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8608         (match_operand 1 "" ""))
8609   (use (match_operand 2 "" ""))
8610   (clobber (reg:SI LR_REGNUM))]
8611  "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8612  "blx%?\\t%0"
8613  [(set_attr "type" "call")]
8614)
8615
8616(define_insn "*call_reg_arm"
8617  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8618         (match_operand 1 "" ""))
8619   (use (match_operand 2 "" ""))
8620   (clobber (reg:SI LR_REGNUM))]
8621  "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8622  "*
8623  return output_call (operands);
8624  "
  ;; The length is the worst case; normally it is only two instructions.
8626  [(set_attr "length" "12")
8627   (set_attr "type" "call")]
8628)
8629
8630
8631(define_expand "call_value"
8632  [(parallel [(set (match_operand       0 "" "")
8633	           (call (match_operand 1 "memory_operand")
8634		         (match_operand 2 "general_operand")))
8635	      (use (match_operand 3 "" ""))
8636	      (clobber (reg:SI LR_REGNUM))])]
8637  "TARGET_EITHER"
8638  "
8639  {
8640    rtx pat, callee;
8641    tree addr = MEM_EXPR (operands[1]);
8642
    /* In an untyped call, we can get NULL for operand 3.  */
    if (operands[3] == 0)
8645      operands[3] = const0_rtx;
8646
8647    /* Decide if we should generate indirect calls by loading the
8648       32-bit address of the callee into a register before performing the
8649       branch and link.  */
8650    callee = XEXP (operands[1], 0);
8651    if (GET_CODE (callee) == SYMBOL_REF
8652	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8653	: !REG_P (callee))
8654      XEXP (operands[1], 0) = force_reg (Pmode, callee);
8655
8656    if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8657	/* Indirect call: set r9 with FDPIC value of callee.  */
8658	XEXP (operands[1], 0)
8659	  = arm_load_function_descriptor (XEXP (operands[1], 0));
8660
8661    if (detect_cmse_nonsecure_call (addr))
8662      {
8663	pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8664						 operands[2], operands[3]);
8665	emit_call_insn (pat);
8666      }
8667    else
8668      {
8669	pat = gen_call_value_internal (operands[0], operands[1],
8670				       operands[2], operands[3]);
8671	arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8672      }
8673
8674    /* Restore FDPIC register (r9) after call.  */
8675    if (TARGET_FDPIC)
8676      {
8677	rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8678	rtx initial_fdpic_reg
8679	    = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8680
8681	emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8682							initial_fdpic_reg));
8683      }
8684
8685    DONE;
8686  }"
8687)
8688
8689(define_expand "call_value_internal"
8690  [(parallel [(set (match_operand       0 "" "")
8691	           (call (match_operand 1 "memory_operand")
8692		         (match_operand 2 "general_operand")))
8693	      (use (match_operand 3 "" ""))
8694	      (clobber (reg:SI LR_REGNUM))])])
8695
8696(define_expand "nonsecure_call_value_internal"
8697  [(parallel [(set (match_operand       0 "" "")
8698		   (call (unspec:SI [(match_operand 1 "memory_operand")]
8699				    UNSPEC_NONSECURE_MEM)
8700			 (match_operand 2 "general_operand")))
8701	      (use (match_operand 3 "" ""))
8702	      (clobber (reg:SI LR_REGNUM))])]
8703  "use_cmse"
8704  "
8705  {
8706    if (!TARGET_HAVE_FPCXT_CMSE)
8707      {
8708	rtx tmp =
8709	  copy_to_suggested_reg (XEXP (operands[1], 0),
8710				 gen_rtx_REG (SImode, R4_REGNUM),
8711				 SImode);
8712
8713	operands[1] = replace_equiv_address (operands[1], tmp);
8714      }
8715  }")
8716
8717(define_insn "*call_value_reg_armv5"
8718  [(set (match_operand 0 "" "")
8719        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8720	      (match_operand 2 "" "")))
8721   (use (match_operand 3 "" ""))
8722   (clobber (reg:SI LR_REGNUM))]
8723  "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8724  "blx%?\\t%1"
8725  [(set_attr "type" "call")]
8726)
8727
8728(define_insn "*call_value_reg_arm"
8729  [(set (match_operand 0 "" "")
8730        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8731	      (match_operand 2 "" "")))
8732   (use (match_operand 3 "" ""))
8733   (clobber (reg:SI LR_REGNUM))]
8734  "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8735  "*
8736  return output_call (&operands[1]);
8737  "
8738  [(set_attr "length" "12")
8739   (set_attr "type" "call")]
8740)
8741
;; Allow calls to SYMBOL_REFs to be handled specially, as they are not valid
;; general addresses.  The '%a' output modifier causes the operand to be
;; treated as an address, i.e. no '#' is output.
8744
8745(define_insn "*call_symbol"
8746  [(call (mem:SI (match_operand:SI 0 "" ""))
8747	 (match_operand 1 "" ""))
8748   (use (match_operand 2 "" ""))
8749   (clobber (reg:SI LR_REGNUM))]
8750  "TARGET_32BIT
8751   && !SIBLING_CALL_P (insn)
8752   && (GET_CODE (operands[0]) == SYMBOL_REF)
8753   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8754  "*
8755  {
8756   rtx op = operands[0];
8757
8758   /* Switch mode now when possible.  */
8759   if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8760	&& arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8761      return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8762
8763    return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8764  }"
8765  [(set_attr "type" "call")]
8766)
8767
8768(define_insn "*call_value_symbol"
8769  [(set (match_operand 0 "" "")
8770	(call (mem:SI (match_operand:SI 1 "" ""))
8771	(match_operand:SI 2 "" "")))
8772   (use (match_operand 3 "" ""))
8773   (clobber (reg:SI LR_REGNUM))]
8774  "TARGET_32BIT
8775   && !SIBLING_CALL_P (insn)
8776   && (GET_CODE (operands[1]) == SYMBOL_REF)
8777   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8778  "*
8779  {
8780   rtx op = operands[1];
8781
8782   /* Switch mode now when possible.  */
8783   if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8784	&& arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8785      return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8786
8787    return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8788  }"
8789  [(set_attr "type" "call")]
8790)
8791
8792(define_expand "sibcall_internal"
8793  [(parallel [(call (match_operand 0 "memory_operand")
8794		    (match_operand 1 "general_operand"))
8795	      (return)
8796	      (use (match_operand 2 "" ""))])])
8797
8798;; We may also be able to do sibcalls for Thumb, but it's much harder...
8799(define_expand "sibcall"
8800  [(parallel [(call (match_operand 0 "memory_operand")
8801		    (match_operand 1 "general_operand"))
8802	      (return)
8803	      (use (match_operand 2 "" ""))])]
8804  "TARGET_32BIT"
8805  "
8806  {
8807    rtx pat;
8808
8809    if ((!REG_P (XEXP (operands[0], 0))
8810	 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8811	|| (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8812	    && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8813     XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8814
8815    if (operands[2] == NULL_RTX)
8816      operands[2] = const0_rtx;
8817
8818    pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8819    arm_emit_call_insn (pat, operands[0], true);
8820    DONE;
8821  }"
8822)
8823
8824(define_expand "sibcall_value_internal"
8825  [(parallel [(set (match_operand 0 "" "")
8826		   (call (match_operand 1 "memory_operand")
8827			 (match_operand 2 "general_operand")))
8828	      (return)
8829	      (use (match_operand 3 "" ""))])])
8830
8831(define_expand "sibcall_value"
8832  [(parallel [(set (match_operand 0 "" "")
8833		   (call (match_operand 1 "memory_operand")
8834			 (match_operand 2 "general_operand")))
8835	      (return)
8836	      (use (match_operand 3 "" ""))])]
8837  "TARGET_32BIT"
8838  "
8839  {
8840    rtx pat;
8841
8842    if ((!REG_P (XEXP (operands[1], 0))
8843	 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8844	|| (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8845	    && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8846     XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8847
8848    if (operands[3] == NULL_RTX)
8849      operands[3] = const0_rtx;
8850
8851    pat = gen_sibcall_value_internal (operands[0], operands[1],
8852                                      operands[2], operands[3]);
8853    arm_emit_call_insn (pat, operands[1], true);
8854    DONE;
8855  }"
8856)
8857
8858(define_insn "*sibcall_insn"
8859 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8860	(match_operand 1 "" ""))
8861  (return)
8862  (use (match_operand 2 "" ""))]
8863  "TARGET_32BIT && SIBLING_CALL_P (insn)"
8864  "*
8865  if (which_alternative == 1)
8866    return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8867  else
8868    {
8869      if (arm_arch5t || arm_arch4t)
8870	return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8871      else
8872	return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8873    }
8874  "
8875  [(set_attr "type" "call")]
8876)
8877
8878(define_insn "*sibcall_value_insn"
8879 [(set (match_operand 0 "" "")
8880       (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8881	     (match_operand 2 "" "")))
8882  (return)
8883  (use (match_operand 3 "" ""))]
8884  "TARGET_32BIT && SIBLING_CALL_P (insn)"
8885  "*
8886  if (which_alternative == 1)
8887   return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8888  else
8889    {
8890      if (arm_arch5t || arm_arch4t)
8891	return \"bx%?\\t%1\";
8892      else
8893	return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8894    }
8895  "
8896  [(set_attr "type" "call")]
8897)
8898
8899(define_expand "<return_str>return"
8900  [(RETURNS)]
8901  "(TARGET_ARM || (TARGET_THUMB2
8902                   && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8903                   && !IS_STACKALIGN (arm_current_func_type ())))
8904    <return_cond_false>"
8905  "
8906  {
8907    if (TARGET_THUMB2)
8908      {
8909        thumb2_expand_return (<return_simple_p>);
8910        DONE;
8911      }
8912  }
8913  "
8914)
8915
;; Often the return insn will be the same as loading from memory, so set the
;; type attribute to load_4.
8917(define_insn "*arm_return"
8918  [(return)]
8919  "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8920  "*
8921  {
8922    if (arm_ccfsm_state == 2)
8923      {
8924        arm_ccfsm_state += 2;
8925        return \"\";
8926      }
8927    return output_return_instruction (const_true_rtx, true, false, false);
8928  }"
8929  [(set_attr "type" "load_4")
8930   (set_attr "length" "12")
8931   (set_attr "predicable" "yes")]
8932)
8933
8934(define_insn "*cond_<return_str>return"
8935  [(set (pc)
8936        (if_then_else (match_operator 0 "arm_comparison_operator"
8937		       [(match_operand 1 "cc_register" "") (const_int 0)])
8938                      (RETURNS)
8939                      (pc)))]
8940  "TARGET_ARM  <return_cond_true>"
8941  "*
8942  {
8943    if (arm_ccfsm_state == 2)
8944      {
8945        arm_ccfsm_state += 2;
8946        return \"\";
8947      }
8948    return output_return_instruction (operands[0], true, false,
8949				      <return_simple_p>);
8950  }"
8951  [(set_attr "conds" "use")
8952   (set_attr "length" "12")
8953   (set_attr "type" "load_4")]
8954)
8955
8956(define_insn "*cond_<return_str>return_inverted"
8957  [(set (pc)
8958        (if_then_else (match_operator 0 "arm_comparison_operator"
8959		       [(match_operand 1 "cc_register" "") (const_int 0)])
8960                      (pc)
8961		      (RETURNS)))]
8962  "TARGET_ARM <return_cond_true>"
8963  "*
8964  {
8965    if (arm_ccfsm_state == 2)
8966      {
8967        arm_ccfsm_state += 2;
8968        return \"\";
8969      }
8970    return output_return_instruction (operands[0], true, true,
8971				      <return_simple_p>);
8972  }"
8973  [(set_attr "conds" "use")
8974   (set_attr "length" "12")
8975   (set_attr "type" "load_4")]
8976)
8977
8978(define_insn "*arm_simple_return"
8979  [(simple_return)]
8980  "TARGET_ARM"
8981  "*
8982  {
8983    if (arm_ccfsm_state == 2)
8984      {
8985        arm_ccfsm_state += 2;
8986        return \"\";
8987      }
8988    return output_return_instruction (const_true_rtx, true, false, true);
8989  }"
8990  [(set_attr "type" "branch")
8991   (set_attr "length" "4")
8992   (set_attr "predicable" "yes")]
8993)
8994
8995;; Generate a sequence of instructions to determine if the processor is
8996;; in 26-bit or 32-bit mode, and return the appropriate return address
8997;; mask.
8998
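;; For illustration only: on a 26-bit processor the PC and PSR share r15,
;; with the address held in bits 2-25, so the mask 0x03fffffc strips the
;; flag and mode bits from a saved return address; on a 32-bit processor
;; the whole register is an address, hence the mask of -1.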
8999(define_expand "return_addr_mask"
9000  [(set (match_dup 1)
9001      (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9002		       (const_int 0)))
9003   (set (match_operand:SI 0 "s_register_operand")
9004      (if_then_else:SI (eq (match_dup 1) (const_int 0))
9005		       (const_int -1)
9006		       (const_int 67108860)))] ; 0x03fffffc
9007  "TARGET_ARM"
9008  "
9009  operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
9010  ")
9011
9012(define_insn "*check_arch2"
9013  [(set (match_operand:CC_NZ 0 "cc_register" "")
9014      (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9015		       (const_int 0)))]
9016  "TARGET_ARM"
9017  "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9018  [(set_attr "length" "8")
9019   (set_attr "conds" "set")
9020   (set_attr "type" "multiple")]
9021)
9022
9023;; Call subroutine returning any type.
9024
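;; For illustration only: untyped_call is the standard named pattern behind
;; constructs like __builtin_apply, e.g.
;;
;;   __builtin_apply ((void (*) ()) fn, args, 16);
;;
;; where the result block filled in below may hold up to four core
;; registers' worth of return value (hence the TImode store of r0-r3).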
9025(define_expand "untyped_call"
9026  [(parallel [(call (match_operand 0 "" "")
9027		    (const_int 0))
9028	      (match_operand 1 "" "")
9029	      (match_operand 2 "" "")])]
9030  "TARGET_EITHER && !TARGET_FDPIC"
9031  "
9032  {
9033    int i;
9034    rtx par = gen_rtx_PARALLEL (VOIDmode,
9035				rtvec_alloc (XVECLEN (operands[2], 0)));
9036    rtx addr = gen_reg_rtx (Pmode);
9037    rtx mem;
9038    int size = 0;
9039
9040    emit_move_insn (addr, XEXP (operands[1], 0));
9041    mem = change_address (operands[1], BLKmode, addr);
9042
9043    for (i = 0; i < XVECLEN (operands[2], 0); i++)
9044      {
9045	rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9046
9047	/* Default code only uses r0 as a return value, but we could
9048	   be using anything up to 4 registers.  */
9049	if (REGNO (src) == R0_REGNUM)
9050	  src = gen_rtx_REG (TImode, R0_REGNUM);
9051
9052        XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9053						 GEN_INT (size));
9054        size += GET_MODE_SIZE (GET_MODE (src));
9055      }
9056
9057    emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9058
9059    size = 0;
9060
9061    for (i = 0; i < XVECLEN (par, 0); i++)
9062      {
9063	HOST_WIDE_INT offset = 0;
9064	rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9065
9066	if (size != 0)
9067	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
9068
9069	mem = change_address (mem, GET_MODE (reg), NULL);
9070	if (REGNO (reg) == R0_REGNUM)
9071	  {
9072	    /* On thumb we have to use a write-back instruction.  */
9073	    emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9074 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9075	    size = TARGET_ARM ? 16 : 0;
9076	  }
9077	else
9078	  {
9079	    emit_move_insn (mem, reg);
9080	    size = GET_MODE_SIZE (GET_MODE (reg));
9081	  }
9082      }
9083
9084    /* The optimizer does not know that the call sets the function value
9085       registers we stored in the result block.  We avoid problems by
9086       claiming that all hard registers are used and clobbered at this
9087       point.  */
9088    emit_insn (gen_blockage ());
9089
9090    DONE;
9091  }"
9092)
9093
9094(define_expand "untyped_return"
9095  [(match_operand:BLK 0 "memory_operand")
9096   (match_operand 1 "" "")]
9097  "TARGET_EITHER && !TARGET_FDPIC"
9098  "
9099  {
9100    int i;
9101    rtx addr = gen_reg_rtx (Pmode);
9102    rtx mem;
9103    int size = 0;
9104
9105    emit_move_insn (addr, XEXP (operands[0], 0));
9106    mem = change_address (operands[0], BLKmode, addr);
9107
9108    for (i = 0; i < XVECLEN (operands[1], 0); i++)
9109      {
9110	HOST_WIDE_INT offset = 0;
9111	rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9112
9113	if (size != 0)
9114	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
9115
9116	mem = change_address (mem, GET_MODE (reg), NULL);
9117	if (REGNO (reg) == R0_REGNUM)
9118	  {
9119	    /* On thumb we have to use a write-back instruction.  */
9120	    emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9121 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9122	    size = TARGET_ARM ? 16 : 0;
9123	  }
9124	else
9125	  {
9126	    emit_move_insn (reg, mem);
9127	    size = GET_MODE_SIZE (GET_MODE (reg));
9128	  }
9129      }
9130
9131    /* Emit USE insns before the return.  */
9132    for (i = 0; i < XVECLEN (operands[1], 0); i++)
9133      emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9134
9135    /* Construct the return.  */
9136    expand_naked_return ();
9137
9138    DONE;
9139  }"
9140)
9141
9142;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9143;; all of memory.  This blocks insns from being moved across this point.
9144
9145(define_insn "blockage"
9146  [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9147  "TARGET_EITHER"
9148  ""
9149  [(set_attr "length" "0")
9150   (set_attr "type" "block")]
9151)
9152
;; Since we hard-code r0 here, use the 'o' constraint to prevent
;; auto-increment addressing modes with r0 as the base register from being
;; emitted, which would provoke undefined behaviour in the hardware.
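;; For illustration only: this pattern is used when stack probing is enabled
;; (e.g. -fstack-check or -fstack-clash-protection); each page of a large
;; stack allocation is touched with a dummy store of r0 so that a guard page
;; is hit before the allocation is actually used.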
9156(define_insn "probe_stack"
9157  [(set (match_operand:SI 0 "memory_operand" "=o")
9158        (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9159  "TARGET_32BIT"
9160  "str%?\\tr0, %0"
9161  [(set_attr "type" "store_4")
9162   (set_attr "predicable" "yes")]
9163)
9164
9165(define_insn "probe_stack_range"
9166  [(set (match_operand:SI 0 "register_operand" "=r")
9167	(unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9168			     (match_operand:SI 2 "register_operand" "r")]
9169			     VUNSPEC_PROBE_STACK_RANGE))]
9170  "TARGET_32BIT"
9171{
9172  return output_probe_stack_range (operands[0], operands[2]);
9173}
9174  [(set_attr "type" "multiple")
9175   (set_attr "conds" "clob")]
9176)
9177
9178;; Named patterns for stack smashing protection.
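;; For illustration only: with -fstack-protector* the prologue uses
;; stack_protect_combined_set to copy the guard value into the canary slot,
;; and the function's exit path uses stack_protect_combined_test to compare
;; the slot against the guard again, branching to the failure code (normally
;; a call to __stack_chk_fail) when the two differ.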
9179(define_expand "stack_protect_combined_set"
9180  [(parallel
9181     [(set (match_operand:SI 0 "memory_operand")
9182	   (unspec:SI [(match_operand:SI 1 "guard_operand")]
9183		      UNSPEC_SP_SET))
9184      (clobber (match_scratch:SI 2 ""))
9185      (clobber (match_scratch:SI 3 ""))])]
9186  ""
9187  ""
9188)
9189
;; Use a separate insn from the above expand so that the mem can stay outside
;; operand #1 when register allocation happens.  This is needed to stop LRA
;; from trying to reload the guard, since we need to control how PIC access
;; is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when
;; calling legitimize_pic_address ()).
9195(define_insn_and_split "*stack_protect_combined_set_insn"
9196  [(set (match_operand:SI 0 "memory_operand" "=m,m")
9197	(unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9198		   UNSPEC_SP_SET))
9199   (clobber (match_scratch:SI 2 "=&l,&r"))
9200   (clobber (match_scratch:SI 3 "=&l,&r"))]
9201  ""
9202  "#"
9203  "reload_completed"
9204  [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9205					    UNSPEC_SP_SET))
9206	      (clobber (match_dup 2))])]
9207  "
9208{
9209  if (flag_pic)
9210    {
9211      rtx pic_reg;
9212
9213      if (TARGET_FDPIC)
9214	  pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9215      else
9216	  pic_reg = operands[3];
9217
9218      /* Forces recomputing of GOT base now.  */
9219      legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9220			      true /*compute_now*/);
9221    }
9222  else
9223    {
9224      if (address_operand (operands[1], SImode))
9225	operands[2] = operands[1];
9226      else
9227	{
9228	  rtx mem = force_const_mem (SImode, operands[1]);
9229	  if (!general_operand (mem, SImode))
9230	    {
9231	      emit_move_insn (operands[2], XEXP (mem, 0));
9232	      mem = replace_equiv_address (mem, operands[2], false);
9233	    }
9234	  emit_move_insn (operands[2], mem);
9235	}
9236    }
9237}"
9238  [(set_attr "arch" "t1,32")]
9239)
9240
9241;; DO NOT SPLIT THIS INSN.  It's important for security reasons that the
9242;; canary value does not live beyond the life of this sequence.
9243(define_insn "*stack_protect_set_insn"
9244  [(set (match_operand:SI 0 "memory_operand" "=m,m")
9245	(unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9246	 UNSPEC_SP_SET))
9247   (clobber (match_dup 1))]
9248  ""
9249  "@
9250   ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9251   ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9252  [(set_attr "length" "8,12")
9253   (set_attr "conds" "clob,nocond")
9254   (set_attr "type" "multiple")
9255   (set_attr "arch" "t1,32")]
9256)
9257
9258(define_expand "stack_protect_combined_test"
9259  [(parallel
9260     [(set (pc)
9261	   (if_then_else
9262		(eq (match_operand:SI 0 "memory_operand")
9263		    (unspec:SI [(match_operand:SI 1 "guard_operand")]
9264			       UNSPEC_SP_TEST))
9265		(label_ref (match_operand 2))
9266		(pc)))
9267      (clobber (match_scratch:SI 3 ""))
9268      (clobber (match_scratch:SI 4 ""))
9269      (clobber (reg:CC CC_REGNUM))])]
9270  ""
9271  ""
9272)
9273
;; Use a separate insn from the above expand so that the mem can stay outside
;; operand #1 when register allocation happens.  This is needed to stop LRA
;; from trying to reload the guard, since we need to control how PIC access
;; is done in the -fpic/-fPIC case (see the COMPUTE_NOW parameter when
;; calling legitimize_pic_address ()).
9279(define_insn_and_split "*stack_protect_combined_test_insn"
9280  [(set (pc)
9281	(if_then_else
9282		(eq (match_operand:SI 0 "memory_operand" "m,m")
9283		    (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9284			       UNSPEC_SP_TEST))
9285		(label_ref (match_operand 2))
9286		(pc)))
9287   (clobber (match_scratch:SI 3 "=&l,&r"))
9288   (clobber (match_scratch:SI 4 "=&l,&r"))
9289   (clobber (reg:CC CC_REGNUM))]
9290  ""
9291  "#"
9292  "reload_completed"
9293  [(const_int 0)]
9294{
9295  rtx eq;
9296
9297  if (flag_pic)
9298    {
9299      rtx pic_reg;
9300
9301      if (TARGET_FDPIC)
9302	  pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9303      else
9304	  pic_reg = operands[4];
9305
9306      /* Forces recomputing of GOT base now.  */
9307      legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9308			      true /*compute_now*/);
9309    }
9310  else
9311    {
9312      if (address_operand (operands[1], SImode))
9313	operands[3] = operands[1];
9314      else
9315	{
9316	  rtx mem = force_const_mem (SImode, operands[1]);
9317	  if (!general_operand (mem, SImode))
9318	    {
9319	      emit_move_insn (operands[3], XEXP (mem, 0));
9320	      mem = replace_equiv_address (mem, operands[3], false);
9321	    }
9322	  emit_move_insn (operands[3], mem);
9323	}
9324    }
9325  if (TARGET_32BIT)
9326    {
9327      emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9328						  operands[3]));
9329      rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9330      eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9331      emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9332    }
9333  else
9334    {
9335      emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9336						     operands[3]));
9337      eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9338      emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9339				      operands[2]));
9340    }
9341  DONE;
9342}
9343  [(set_attr "arch" "t1,32")]
9344)
9345
9346;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
9347;; canary value does not live beyond the end of this sequence.
9348(define_insn "arm_stack_protect_test_insn"
9349  [(set (reg:CC_Z CC_REGNUM)
9350	(compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9351				  (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9352				 UNSPEC_SP_TEST)
9353		      (const_int 0)))
9354   (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9355   (clobber (match_dup 2))]
9356  "TARGET_32BIT"
9357  "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9358  [(set_attr "length" "12,16")
9359   (set_attr "conds" "set")
9360   (set_attr "type" "multiple")
9361   (set_attr "arch" "t,32")]
9362)
9363
9364(define_expand "casesi"
9365  [(match_operand:SI 0 "s_register_operand")	; index to jump on
9366   (match_operand:SI 1 "const_int_operand")	; lower bound
9367   (match_operand:SI 2 "const_int_operand")	; total range
9368   (match_operand:SI 3 "" "")			; table label
9369   (match_operand:SI 4 "" "")]			; Out of range label
9370  "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9371  "
9372  {
9373    enum insn_code code;
9374    if (operands[1] != const0_rtx)
9375      {
9376	rtx reg = gen_reg_rtx (SImode);
9377
9378	emit_insn (gen_addsi3 (reg, operands[0],
9379			       gen_int_mode (-INTVAL (operands[1]),
9380			       		     SImode)));
9381	operands[0] = reg;
9382      }
9383
9384    if (TARGET_ARM)
9385      code = CODE_FOR_arm_casesi_internal;
9386    else if (TARGET_THUMB1)
9387      code = CODE_FOR_thumb1_casesi_internal_pic;
9388    else if (flag_pic)
9389      code = CODE_FOR_thumb2_casesi_internal_pic;
9390    else
9391      code = CODE_FOR_thumb2_casesi_internal;
9392
9393    if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9394      operands[2] = force_reg (SImode, operands[2]);
9395
9396    emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9397					  operands[3], operands[4]));
9398    DONE;
9399  }"
9400)
9401
9402;; The USE in this pattern is needed to tell flow analysis that this is
9403;; a CASESI insn.  It has no other purpose.
9404(define_expand "arm_casesi_internal"
9405  [(parallel [(set (pc)
9406	       (if_then_else
9407		(leu (match_operand:SI 0 "s_register_operand")
9408		     (match_operand:SI 1 "arm_rhs_operand"))
9409		(match_dup 4)
9410		(label_ref:SI (match_operand 3 ""))))
9411	      (clobber (reg:CC CC_REGNUM))
9412	      (use (label_ref:SI (match_operand 2 "")))])]
9413  "TARGET_ARM"
9414{
9415  operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9416  operands[4] = gen_rtx_PLUS (SImode, operands[4],
9417			      gen_rtx_LABEL_REF (SImode, operands[2]));
9418  operands[4] = gen_rtx_MEM (SImode, operands[4]);
9419  MEM_READONLY_P (operands[4]) = 1;
9420  MEM_NOTRAP_P (operands[4]) = 1;
9421})
9422
9423(define_insn "*arm_casesi_internal"
9424  [(parallel [(set (pc)
9425	       (if_then_else
9426		(leu (match_operand:SI 0 "s_register_operand" "r")
9427		     (match_operand:SI 1 "arm_rhs_operand" "rI"))
9428		(mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9429				 (label_ref:SI (match_operand 2 "" ""))))
9430		(label_ref:SI (match_operand 3 "" ""))))
9431	      (clobber (reg:CC CC_REGNUM))
9432	      (use (label_ref:SI (match_dup 2)))])]
9433  "TARGET_ARM"
9434  "*
9435    if (flag_pic)
9436      return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9437    return   \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9438  "
9439  [(set_attr "conds" "clob")
9440   (set_attr "length" "12")
9441   (set_attr "type" "multiple")]
9442)
9443
9444(define_expand "indirect_jump"
9445  [(set (pc)
9446	(match_operand:SI 0 "s_register_operand"))]
9447  "TARGET_EITHER"
9448  "
9449  /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
9450     address and use bx.  */
9451  if (TARGET_THUMB2)
9452    {
9453      rtx tmp;
9454      tmp = gen_reg_rtx (SImode);
9455      emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9456      operands[0] = tmp;
9457    }
9458  "
9459)
9460
9461;; NB Never uses BX.
9462(define_insn "*arm_indirect_jump"
9463  [(set (pc)
9464	(match_operand:SI 0 "s_register_operand" "r"))]
9465  "TARGET_ARM"
9466  "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9467  [(set_attr "predicable" "yes")
9468   (set_attr "type" "branch")]
9469)
9470
9471(define_insn "*load_indirect_jump"
9472  [(set (pc)
9473	(match_operand:SI 0 "memory_operand" "m"))]
9474  "TARGET_ARM"
9475  "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9476  [(set_attr "type" "load_4")
9477   (set_attr "pool_range" "4096")
9478   (set_attr "neg_pool_range" "4084")
9479   (set_attr "predicable" "yes")]
9480)
9481
9482
9483;; Misc insns
9484
9485(define_insn "nop"
9486  [(const_int 0)]
9487  "TARGET_EITHER"
9488  "nop"
9489  [(set (attr "length")
9490	(if_then_else (eq_attr "is_thumb" "yes")
9491		      (const_int 2)
9492		      (const_int 4)))
9493   (set_attr "type" "mov_reg")]
9494)
9495
9496(define_insn "trap"
9497  [(trap_if (const_int 1) (const_int 0))]
9498  ""
9499  "*
9500  if (TARGET_ARM)
9501    return \".inst\\t0xe7f000f0\";
9502  else
9503    return \".inst\\t0xdeff\";
9504  "
9505  [(set (attr "length")
9506	(if_then_else (eq_attr "is_thumb" "yes")
9507		      (const_int 2)
9508		      (const_int 4)))
9509   (set_attr "type" "trap")
9510   (set_attr "conds" "unconditional")]
9511)
9512
9513
9514;; Patterns to allow combination of arithmetic, cond code and shifts
9515
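;; For illustration only: an expression such as
;;
;;   int f (int a, int b) { return a + (b << 2); }
;;
;; can be matched by the patterns below and emitted as a single instruction,
;; roughly "add r0, r0, r1, lsl #2"; the *_multsi variant catches the
;; equivalent multiply-by-power-of-two form, a + b * 4.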
9516(define_insn "*<arith_shift_insn>_multsi"
9517  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9518	(SHIFTABLE_OPS:SI
9519	 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9520		  (match_operand:SI 3 "power_of_two_operand" ""))
9521	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9522  "TARGET_32BIT"
9523  "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9524  [(set_attr "predicable" "yes")
9525   (set_attr "shift" "2")
9526   (set_attr "arch" "a,t2")
9527   (set_attr "autodetect_type" "alu_shift_mul_op3")])
9528
9529(define_insn "*<arith_shift_insn>_shiftsi"
9530  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9531	(SHIFTABLE_OPS:SI
9532	 (match_operator:SI 2 "shift_nomul_operator"
9533	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
9534	   (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9535	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9536  "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9537  "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9538  [(set_attr "predicable" "yes")
9539   (set_attr "shift" "3")
9540   (set_attr "arch" "a,t2,a")
9541   (set_attr "autodetect_type" "alu_shift_operator2")])
9542
9543(define_split
9544  [(set (match_operand:SI 0 "s_register_operand" "")
9545	(match_operator:SI 1 "shiftable_operator"
9546	 [(match_operator:SI 2 "shiftable_operator"
9547	   [(match_operator:SI 3 "shift_operator"
9548	     [(match_operand:SI 4 "s_register_operand" "")
9549	      (match_operand:SI 5 "reg_or_int_operand" "")])
9550	    (match_operand:SI 6 "s_register_operand" "")])
9551	  (match_operand:SI 7 "arm_rhs_operand" "")]))
9552   (clobber (match_operand:SI 8 "s_register_operand" ""))]
9553  "TARGET_32BIT"
9554  [(set (match_dup 8)
9555	(match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9556			 (match_dup 6)]))
9557   (set (match_dup 0)
9558	(match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9559  "")
9560
9561(define_insn "*arith_shiftsi_compare0"
9562  [(set (reg:CC_NZ CC_REGNUM)
9563        (compare:CC_NZ
9564	 (match_operator:SI 1 "shiftable_operator"
9565	  [(match_operator:SI 3 "shift_operator"
9566	    [(match_operand:SI 4 "s_register_operand" "r,r")
9567	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
9568	   (match_operand:SI 2 "s_register_operand" "r,r")])
9569	 (const_int 0)))
9570   (set (match_operand:SI 0 "s_register_operand" "=r,r")
9571	(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9572			 (match_dup 2)]))]
9573  "TARGET_32BIT"
9574  "%i1s%?\\t%0, %2, %4%S3"
9575  [(set_attr "conds" "set")
9576   (set_attr "shift" "4")
9577   (set_attr "arch" "32,a")
9578   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9579
9580(define_insn "*arith_shiftsi_compare0_scratch"
9581  [(set (reg:CC_NZ CC_REGNUM)
9582        (compare:CC_NZ
9583	 (match_operator:SI 1 "shiftable_operator"
9584	  [(match_operator:SI 3 "shift_operator"
9585	    [(match_operand:SI 4 "s_register_operand" "r,r")
9586	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
9587	   (match_operand:SI 2 "s_register_operand" "r,r")])
9588	 (const_int 0)))
9589   (clobber (match_scratch:SI 0 "=r,r"))]
9590  "TARGET_32BIT"
9591  "%i1s%?\\t%0, %2, %4%S3"
9592  [(set_attr "conds" "set")
9593   (set_attr "shift" "4")
9594   (set_attr "arch" "32,a")
9595   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9596
9597(define_insn "*sub_shiftsi"
9598  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9599	(minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9600		  (match_operator:SI 2 "shift_operator"
9601		   [(match_operand:SI 3 "s_register_operand" "r,r")
9602		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9603  "TARGET_32BIT"
9604  "sub%?\\t%0, %1, %3%S2"
9605  [(set_attr "predicable" "yes")
9606   (set_attr "predicable_short_it" "no")
9607   (set_attr "shift" "3")
9608   (set_attr "arch" "32,a")
9609   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9610
9611(define_insn "*sub_shiftsi_compare0"
9612  [(set (reg:CC_NZ CC_REGNUM)
9613	(compare:CC_NZ
9614	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9615		   (match_operator:SI 2 "shift_operator"
9616		    [(match_operand:SI 3 "s_register_operand" "r,r")
9617		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9618	 (const_int 0)))
9619   (set (match_operand:SI 0 "s_register_operand" "=r,r")
9620	(minus:SI (match_dup 1)
9621		  (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9622  "TARGET_32BIT"
9623  "subs%?\\t%0, %1, %3%S2"
9624  [(set_attr "conds" "set")
9625   (set_attr "shift" "3")
9626   (set_attr "arch" "32,a")
9627   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9628
9629(define_insn "*sub_shiftsi_compare0_scratch"
9630  [(set (reg:CC_NZ CC_REGNUM)
9631	(compare:CC_NZ
9632	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9633		   (match_operator:SI 2 "shift_operator"
9634		    [(match_operand:SI 3 "s_register_operand" "r,r")
9635		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9636	 (const_int 0)))
9637   (clobber (match_scratch:SI 0 "=r,r"))]
9638  "TARGET_32BIT"
9639  "subs%?\\t%0, %1, %3%S2"
9640  [(set_attr "conds" "set")
9641   (set_attr "shift" "3")
9642   (set_attr "arch" "32,a")
9643   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9644
9645
9646(define_insn_and_split "*and_scc"
9647  [(set (match_operand:SI 0 "s_register_operand" "=r")
9648	(and:SI (match_operator:SI 1 "arm_comparison_operator"
9649		 [(match_operand 2 "cc_register" "") (const_int 0)])
9650		(match_operand:SI 3 "s_register_operand" "r")))]
9651  "TARGET_ARM"
9652  "#"   ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9653  "&& reload_completed"
9654  [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9655   (cond_exec (match_dup 4) (set (match_dup 0)
9656                                 (and:SI (match_dup 3) (const_int 1))))]
9657  {
9658    machine_mode mode = GET_MODE (operands[2]);
9659    enum rtx_code rc = GET_CODE (operands[1]);
9660
9661    /* Note that operands[4] is the same as operands[1],
9662       but with VOIDmode as the result. */
9663    operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9664    if (mode == CCFPmode || mode == CCFPEmode)
9665      rc = reverse_condition_maybe_unordered (rc);
9666    else
9667      rc = reverse_condition (rc);
9668    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9669  }
9670  [(set_attr "conds" "use")
9671   (set_attr "type" "multiple")
9672   (set_attr "length" "8")]
9673)
9674
9675(define_insn_and_split "*ior_scc"
9676  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9677	(ior:SI (match_operator:SI 1 "arm_comparison_operator"
9678		 [(match_operand 2 "cc_register" "") (const_int 0)])
9679		(match_operand:SI 3 "s_register_operand" "0,?r")))]
9680  "TARGET_ARM"
9681  "@
9682   orr%d1\\t%0, %3, #1
9683   #"
9684  "&& reload_completed
9685   && REGNO (operands[0]) != REGNO (operands[3])"
9686  ;; && which_alternative == 1
9687  ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9688  [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9689   (cond_exec (match_dup 4) (set (match_dup 0)
9690                                 (ior:SI (match_dup 3) (const_int 1))))]
9691  {
9692    machine_mode mode = GET_MODE (operands[2]);
9693    enum rtx_code rc = GET_CODE (operands[1]);
9694
9695    /* Note that operands[4] is the same as operands[1],
9696       but with VOIDmode as the result. */
9697    operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9698    if (mode == CCFPmode || mode == CCFPEmode)
9699      rc = reverse_condition_maybe_unordered (rc);
9700    else
9701      rc = reverse_condition (rc);
9702    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9703  }
9704  [(set_attr "conds" "use")
9705   (set_attr "length" "4,8")
9706   (set_attr "type" "logic_imm,multiple")]
9707)
9708
9709; A series of splitters for the compare_scc pattern below.  Note that
9710; order is important.
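;; As an illustrative sketch (assuming the shift-based splitters below are
;; the ones that fire), the first two turn
;;	Rd = (reg1 < 0)		into	lsr	Rd, reg1, #31
;;	Rd = (reg1 >= 0)	into	mvn	Rd, reg1
;;					lsr	Rd, Rd, #31
;; so neither needs the condition flags.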
9711(define_split
9712  [(set (match_operand:SI 0 "s_register_operand" "")
9713	(lt:SI (match_operand:SI 1 "s_register_operand" "")
9714	       (const_int 0)))
9715   (clobber (reg:CC CC_REGNUM))]
9716  "TARGET_32BIT && reload_completed"
9717  [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9718
9719(define_split
9720  [(set (match_operand:SI 0 "s_register_operand" "")
9721	(ge:SI (match_operand:SI 1 "s_register_operand" "")
9722	       (const_int 0)))
9723   (clobber (reg:CC CC_REGNUM))]
9724  "TARGET_32BIT && reload_completed"
9725  [(set (match_dup 0) (not:SI (match_dup 1)))
9726   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9727
9728(define_split
9729  [(set (match_operand:SI 0 "s_register_operand" "")
9730	(eq:SI (match_operand:SI 1 "s_register_operand" "")
9731	       (const_int 0)))
9732   (clobber (reg:CC CC_REGNUM))]
9733  "arm_arch5t && TARGET_32BIT"
9734  [(set (match_dup 0) (clz:SI (match_dup 1)))
9735   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9736)
9737
9738(define_split
9739  [(set (match_operand:SI 0 "s_register_operand" "")
9740	(eq:SI (match_operand:SI 1 "s_register_operand" "")
9741	       (const_int 0)))
9742   (clobber (reg:CC CC_REGNUM))]
9743  "TARGET_32BIT && reload_completed"
9744  [(parallel
9745    [(set (reg:CC CC_REGNUM)
9746	  (compare:CC (const_int 1) (match_dup 1)))
9747     (set (match_dup 0)
9748	  (minus:SI (const_int 1) (match_dup 1)))])
9749   (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9750	      (set (match_dup 0) (const_int 0)))])
9751
9752(define_split
9753  [(set (match_operand:SI 0 "s_register_operand" "")
9754	(ne:SI (match_operand:SI 1 "s_register_operand" "")
9755	       (match_operand:SI 2 "const_int_operand" "")))
9756   (clobber (reg:CC CC_REGNUM))]
9757  "TARGET_32BIT && reload_completed"
9758  [(parallel
9759    [(set (reg:CC CC_REGNUM)
9760	  (compare:CC (match_dup 1) (match_dup 2)))
9761     (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9762   (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9763	      (set (match_dup 0) (const_int 1)))]
9764{
9765  operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9766})
9767
9768(define_split
9769  [(set (match_operand:SI 0 "s_register_operand" "")
9770	(ne:SI (match_operand:SI 1 "s_register_operand" "")
9771	       (match_operand:SI 2 "arm_add_operand" "")))
9772   (clobber (reg:CC CC_REGNUM))]
9773  "TARGET_32BIT && reload_completed"
9774  [(parallel
9775    [(set (reg:CC_NZ CC_REGNUM)
9776	  (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9777			   (const_int 0)))
9778     (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9779   (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9780	      (set (match_dup 0) (const_int 1)))])
9781
9782(define_insn_and_split "*compare_scc"
9783  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9784	(match_operator:SI 1 "arm_comparison_operator"
9785	 [(match_operand:SI 2 "s_register_operand" "r,r")
9786	  (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9787   (clobber (reg:CC CC_REGNUM))]
9788  "TARGET_32BIT"
9789  "#"
9790  "&& reload_completed"
9791  [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9792   (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9793   (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9794{
9795  rtx tmp1;
9796  machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9797					   operands[2], operands[3]);
9798  enum rtx_code rc = GET_CODE (operands[1]);
9799
9800  tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9801
9802  operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9803  if (mode == CCFPmode || mode == CCFPEmode)
9804    rc = reverse_condition_maybe_unordered (rc);
9805  else
9806    rc = reverse_condition (rc);
9807  operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9808}
9809  [(set_attr "type" "multiple")]
9810)
9811
9812;; Attempt to improve the sequences generated by the compare_scc splitters
9813;; so that they do not use conditional execution.
9814
9815;; Rd = (eq (reg1) (const_int 0))  // ARMv5
9816;;	clz Rd, reg1
9817;;	lsr Rd, Rd, #5
9818(define_peephole2
9819  [(set (reg:CC CC_REGNUM)
9820	(compare:CC (match_operand:SI 1 "register_operand" "")
9821		    (const_int 0)))
9822   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9823	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9824   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9825	      (set (match_dup 0) (const_int 1)))]
9826  "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9827  [(set (match_dup 0) (clz:SI (match_dup 1)))
9828   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9829)
9830
9831;; Rd = (eq (reg1) (const_int 0))  // !ARMv5
9832;;	negs Rd, reg1
9833;;	adc  Rd, Rd, reg1
9834(define_peephole2
9835  [(set (reg:CC CC_REGNUM)
9836	(compare:CC (match_operand:SI 1 "register_operand" "")
9837		    (const_int 0)))
9838   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9839	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9840   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9841	      (set (match_dup 0) (const_int 1)))
9842   (match_scratch:SI 2 "r")]
9843  "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9844  [(parallel
9845    [(set (reg:CC CC_REGNUM)
9846	  (compare:CC (const_int 0) (match_dup 1)))
9847     (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9848   (set (match_dup 0)
9849	(plus:SI (plus:SI (match_dup 1) (match_dup 2))
9850		 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9851)
9852
9853;; Rd = (eq (reg1) (reg2/imm))	// ARMv5 and optimising for speed.
9854;;	sub  Rd, Reg1, reg2
9855;;	clz  Rd, Rd
9856;;	lsr  Rd, Rd, #5
9857(define_peephole2
9858  [(set (reg:CC CC_REGNUM)
9859	(compare:CC (match_operand:SI 1 "register_operand" "")
9860		    (match_operand:SI 2 "arm_rhs_operand" "")))
9861   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9862	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9863   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9864	      (set (match_dup 0) (const_int 1)))]
9865  "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9866  && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9867  [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9868   (set (match_dup 0) (clz:SI (match_dup 0)))
9869   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9870)
9871
9872
9873;; Rd = (eq (reg1) (reg2))	// ! ARMv5 or optimising for size.
9874;;	sub  T1, Reg1, reg2
9875;;	negs Rd, T1
9876;;	adc  Rd, Rd, T1
9877(define_peephole2
9878  [(set (reg:CC CC_REGNUM)
9879	(compare:CC (match_operand:SI 1 "register_operand" "")
9880		    (match_operand:SI 2 "arm_rhs_operand" "")))
9881   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9882	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9883   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9884	      (set (match_dup 0) (const_int 1)))
9885   (match_scratch:SI 3 "r")]
9886  "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9887  [(set (match_dup 3) (match_dup 4))
9888   (parallel
9889    [(set (reg:CC CC_REGNUM)
9890	  (compare:CC (const_int 0) (match_dup 3)))
9891     (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9892   (set (match_dup 0)
9893	(plus:SI (plus:SI (match_dup 0) (match_dup 3))
9894		 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9895  "
9896  if (CONST_INT_P (operands[2]))
9897    operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9898  else
9899    operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9900  ")
9901
9902(define_insn "*cond_move"
9903  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9904	(if_then_else:SI (match_operator 3 "equality_operator"
9905			  [(match_operator 4 "arm_comparison_operator"
9906			    [(match_operand 5 "cc_register" "") (const_int 0)])
9907			   (const_int 0)])
9908			 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9909			 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9910  "TARGET_ARM"
9911  "*
9912    if (GET_CODE (operands[3]) == NE)
9913      {
9914        if (which_alternative != 1)
9915	  output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9916        if (which_alternative != 0)
9917	  output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9918        return \"\";
9919      }
9920    if (which_alternative != 0)
9921      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9922    if (which_alternative != 1)
9923      output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9924    return \"\";
9925  "
9926  [(set_attr "conds" "use")
9927   (set_attr_alternative "type"
9928                         [(if_then_else (match_operand 2 "const_int_operand" "")
9929                                        (const_string "mov_imm")
9930                                        (const_string "mov_reg"))
9931                          (if_then_else (match_operand 1 "const_int_operand" "")
9932                                        (const_string "mov_imm")
9933                                        (const_string "mov_reg"))
9934                          (const_string "multiple")])
9935   (set_attr "length" "4,4,8")]
9936)
9937
9938(define_insn "*cond_arith"
9939  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9940        (match_operator:SI 5 "shiftable_operator"
9941	 [(match_operator:SI 4 "arm_comparison_operator"
9942           [(match_operand:SI 2 "s_register_operand" "r,r")
9943	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9944          (match_operand:SI 1 "s_register_operand" "0,?r")]))
9945   (clobber (reg:CC CC_REGNUM))]
9946  "TARGET_ARM"
9947  "*
9948    if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9949      return \"%i5\\t%0, %1, %2, lsr #31\";
9950
9951    output_asm_insn (\"cmp\\t%2, %3\", operands);
9952    if (GET_CODE (operands[5]) == AND)
9953      output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9954    else if (GET_CODE (operands[5]) == MINUS)
9955      output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9956    else if (which_alternative != 0)
9957      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9958    return \"%i5%d4\\t%0, %1, #1\";
9959  "
9960  [(set_attr "conds" "clob")
9961   (set_attr "length" "12")
9962   (set_attr "type" "multiple")]
9963)
9964
9965(define_insn "*cond_sub"
9966  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9967        (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9968		  (match_operator:SI 4 "arm_comparison_operator"
9969                   [(match_operand:SI 2 "s_register_operand" "r,r")
9970		    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9971   (clobber (reg:CC CC_REGNUM))]
9972  "TARGET_ARM"
9973  "*
9974    output_asm_insn (\"cmp\\t%2, %3\", operands);
9975    if (which_alternative != 0)
9976      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9977    return \"sub%d4\\t%0, %1, #1\";
9978  "
9979  [(set_attr "conds" "clob")
9980   (set_attr "length" "8,12")
9981   (set_attr "type" "multiple")]
9982)
9983
9984(define_insn "*cmp_ite0"
9985  [(set (match_operand 6 "dominant_cc_register" "")
9986	(compare
9987	 (if_then_else:SI
9988	  (match_operator 4 "arm_comparison_operator"
9989	   [(match_operand:SI 0 "s_register_operand"
9990	        "l,l,l,r,r,r,r,r,r")
9991	    (match_operand:SI 1 "arm_add_operand"
9992	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9993	  (match_operator:SI 5 "arm_comparison_operator"
9994	   [(match_operand:SI 2 "s_register_operand"
9995	        "l,r,r,l,l,r,r,r,r")
9996	    (match_operand:SI 3 "arm_add_operand"
9997	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9998	  (const_int 0))
9999	 (const_int 0)))]
10000  "TARGET_32BIT"
10001  "*
10002  {
10003    static const char * const cmp1[NUM_OF_COND_CMP][2] =
10004    {
10005      {\"cmp%d5\\t%0, %1\",
10006       \"cmp%d4\\t%2, %3\"},
10007      {\"cmn%d5\\t%0, #%n1\",
10008       \"cmp%d4\\t%2, %3\"},
10009      {\"cmp%d5\\t%0, %1\",
10010       \"cmn%d4\\t%2, #%n3\"},
10011      {\"cmn%d5\\t%0, #%n1\",
10012       \"cmn%d4\\t%2, #%n3\"}
10013    };
10014    static const char * const cmp2[NUM_OF_COND_CMP][2] =
10015    {
10016      {\"cmp\\t%2, %3\",
10017       \"cmp\\t%0, %1\"},
10018      {\"cmp\\t%2, %3\",
10019       \"cmn\\t%0, #%n1\"},
10020      {\"cmn\\t%2, #%n3\",
10021       \"cmp\\t%0, %1\"},
10022      {\"cmn\\t%2, #%n3\",
10023       \"cmn\\t%0, #%n1\"}
10024    };
10025    static const char * const ite[2] =
10026    {
10027      \"it\\t%d5\",
10028      \"it\\t%d4\"
10029    };
10030    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10031                                   CMP_CMP, CMN_CMP, CMP_CMP,
10032                                   CMN_CMP, CMP_CMN, CMN_CMN};
10033    int swap =
10034      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10035
10036    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10037    if (TARGET_THUMB2) {
10038      output_asm_insn (ite[swap], operands);
10039    }
10040    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10041    return \"\";
10042  }"
10043  [(set_attr "conds" "set")
10044   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10045   (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10046   (set_attr "type" "multiple")
10047   (set_attr_alternative "length"
10048      [(const_int 6)
10049       (const_int 8)
10050       (const_int 8)
10051       (const_int 8)
10052       (const_int 8)
10053       (if_then_else (eq_attr "is_thumb" "no")
10054           (const_int 8)
10055           (const_int 10))
10056       (if_then_else (eq_attr "is_thumb" "no")
10057           (const_int 8)
10058           (const_int 10))
10059       (if_then_else (eq_attr "is_thumb" "no")
10060           (const_int 8)
10061           (const_int 10))
10062       (if_then_else (eq_attr "is_thumb" "no")
10063           (const_int 8)
10064           (const_int 10))])]
10065)
10066
10067(define_insn "*cmp_ite1"
10068  [(set (match_operand 6 "dominant_cc_register" "")
10069	(compare
10070	 (if_then_else:SI
10071	  (match_operator 4 "arm_comparison_operator"
10072	   [(match_operand:SI 0 "s_register_operand"
10073	        "l,l,l,r,r,r,r,r,r")
10074	    (match_operand:SI 1 "arm_add_operand"
10075	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10076	  (match_operator:SI 5 "arm_comparison_operator"
10077	   [(match_operand:SI 2 "s_register_operand"
10078	        "l,r,r,l,l,r,r,r,r")
10079	    (match_operand:SI 3 "arm_add_operand"
10080	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10081	  (const_int 1))
10082	 (const_int 0)))]
10083  "TARGET_32BIT"
10084  "*
10085  {
10086    static const char * const cmp1[NUM_OF_COND_CMP][2] =
10087    {
10088      {\"cmp\\t%0, %1\",
10089       \"cmp\\t%2, %3\"},
10090      {\"cmn\\t%0, #%n1\",
10091       \"cmp\\t%2, %3\"},
10092      {\"cmp\\t%0, %1\",
10093       \"cmn\\t%2, #%n3\"},
10094      {\"cmn\\t%0, #%n1\",
10095       \"cmn\\t%2, #%n3\"}
10096    };
10097    static const char * const cmp2[NUM_OF_COND_CMP][2] =
10098    {
10099      {\"cmp%d4\\t%2, %3\",
10100       \"cmp%D5\\t%0, %1\"},
10101      {\"cmp%d4\\t%2, %3\",
10102       \"cmn%D5\\t%0, #%n1\"},
10103      {\"cmn%d4\\t%2, #%n3\",
10104       \"cmp%D5\\t%0, %1\"},
10105      {\"cmn%d4\\t%2, #%n3\",
10106       \"cmn%D5\\t%0, #%n1\"}
10107    };
10108    static const char * const ite[2] =
10109    {
10110      \"it\\t%d4\",
10111      \"it\\t%D5\"
10112    };
10113    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10114                                   CMP_CMP, CMN_CMP, CMP_CMP,
10115                                   CMN_CMP, CMP_CMN, CMN_CMN};
10116    int swap =
10117      comparison_dominates_p (GET_CODE (operands[5]),
10118			      reverse_condition (GET_CODE (operands[4])));
10119
10120    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10121    if (TARGET_THUMB2) {
10122      output_asm_insn (ite[swap], operands);
10123    }
10124    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10125    return \"\";
10126  }"
10127  [(set_attr "conds" "set")
10128   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10129   (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10130   (set_attr_alternative "length"
10131      [(const_int 6)
10132       (const_int 8)
10133       (const_int 8)
10134       (const_int 8)
10135       (const_int 8)
10136       (if_then_else (eq_attr "is_thumb" "no")
10137           (const_int 8)
10138           (const_int 10))
10139       (if_then_else (eq_attr "is_thumb" "no")
10140           (const_int 8)
10141           (const_int 10))
10142       (if_then_else (eq_attr "is_thumb" "no")
10143           (const_int 8)
10144           (const_int 10))
10145       (if_then_else (eq_attr "is_thumb" "no")
10146           (const_int 8)
10147           (const_int 10))])
10148   (set_attr "type" "multiple")]
10149)
10150
10151(define_insn "*cmp_and"
10152  [(set (match_operand 6 "dominant_cc_register" "")
10153	(compare
10154	 (and:SI
10155	  (match_operator 4 "arm_comparison_operator"
10156	   [(match_operand:SI 0 "s_register_operand"
10157	        "l,l,l,r,r,r,r,r,r,r")
10158	    (match_operand:SI 1 "arm_add_operand"
10159	        "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10160	  (match_operator:SI 5 "arm_comparison_operator"
10161	   [(match_operand:SI 2 "s_register_operand"
10162	        "l,r,r,l,l,r,r,r,r,r")
10163	    (match_operand:SI 3 "arm_add_operand"
10164	        "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10165	 (const_int 0)))]
10166  "TARGET_32BIT"
10167  "*
10168  {
10169    static const char *const cmp1[NUM_OF_COND_CMP][2] =
10170    {
10171      {\"cmp%d5\\t%0, %1\",
10172       \"cmp%d4\\t%2, %3\"},
10173      {\"cmn%d5\\t%0, #%n1\",
10174       \"cmp%d4\\t%2, %3\"},
10175      {\"cmp%d5\\t%0, %1\",
10176       \"cmn%d4\\t%2, #%n3\"},
10177      {\"cmn%d5\\t%0, #%n1\",
10178       \"cmn%d4\\t%2, #%n3\"}
10179    };
10180    static const char *const cmp2[NUM_OF_COND_CMP][2] =
10181    {
10182      {\"cmp\\t%2, %3\",
10183       \"cmp\\t%0, %1\"},
10184      {\"cmp\\t%2, %3\",
10185       \"cmn\\t%0, #%n1\"},
10186      {\"cmn\\t%2, #%n3\",
10187       \"cmp\\t%0, %1\"},
10188      {\"cmn\\t%2, #%n3\",
10189       \"cmn\\t%0, #%n1\"}
10190    };
10191    static const char *const ite[2] =
10192    {
10193      \"it\\t%d5\",
10194      \"it\\t%d4\"
10195    };
10196    static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10197                                  CMP_CMP, CMN_CMP, CMP_CMP,
10198                                  CMP_CMP, CMN_CMP, CMP_CMN,
10199				  CMN_CMN};
10200    int swap =
10201      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10202
10203    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10204    if (TARGET_THUMB2) {
10205      output_asm_insn (ite[swap], operands);
10206    }
10207    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10208    return \"\";
10209  }"
10210  [(set_attr "conds" "set")
10211   (set_attr "predicable" "no")
10212   (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10213   (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10214   (set_attr_alternative "length"
10215      [(const_int 6)
10216       (const_int 8)
10217       (const_int 8)
10218       (const_int 8)
10219       (const_int 8)
10220       (const_int 6)
10221       (if_then_else (eq_attr "is_thumb" "no")
10222           (const_int 8)
10223           (const_int 10))
10224       (if_then_else (eq_attr "is_thumb" "no")
10225           (const_int 8)
10226           (const_int 10))
10227       (if_then_else (eq_attr "is_thumb" "no")
10228           (const_int 8)
10229           (const_int 10))
10230       (if_then_else (eq_attr "is_thumb" "no")
10231           (const_int 8)
10232           (const_int 10))])
10233   (set_attr "type" "multiple")]
10234)
10235
10236(define_insn "*cmp_ior"
10237  [(set (match_operand 6 "dominant_cc_register" "")
10238	(compare
10239	 (ior:SI
10240	  (match_operator 4 "arm_comparison_operator"
10241	   [(match_operand:SI 0 "s_register_operand"
10242	        "l,l,l,r,r,r,r,r,r,r")
10243	    (match_operand:SI 1 "arm_add_operand"
10244	        "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10245	  (match_operator:SI 5 "arm_comparison_operator"
10246	   [(match_operand:SI 2 "s_register_operand"
10247	        "l,r,r,l,l,r,r,r,r,r")
10248	    (match_operand:SI 3 "arm_add_operand"
10249	        "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10250	 (const_int 0)))]
10251  "TARGET_32BIT"
10252  "*
10253  {
10254    static const char *const cmp1[NUM_OF_COND_CMP][2] =
10255    {
10256      {\"cmp\\t%0, %1\",
10257       \"cmp\\t%2, %3\"},
10258      {\"cmn\\t%0, #%n1\",
10259       \"cmp\\t%2, %3\"},
10260      {\"cmp\\t%0, %1\",
10261       \"cmn\\t%2, #%n3\"},
10262      {\"cmn\\t%0, #%n1\",
10263       \"cmn\\t%2, #%n3\"}
10264    };
10265    static const char *const cmp2[NUM_OF_COND_CMP][2] =
10266    {
10267      {\"cmp%D4\\t%2, %3\",
10268       \"cmp%D5\\t%0, %1\"},
10269      {\"cmp%D4\\t%2, %3\",
10270       \"cmn%D5\\t%0, #%n1\"},
10271      {\"cmn%D4\\t%2, #%n3\",
10272       \"cmp%D5\\t%0, %1\"},
10273      {\"cmn%D4\\t%2, #%n3\",
10274       \"cmn%D5\\t%0, #%n1\"}
10275    };
10276    static const char *const ite[2] =
10277    {
10278      \"it\\t%D4\",
10279      \"it\\t%D5\"
10280    };
10281    static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10282                                  CMP_CMP, CMN_CMP, CMP_CMP,
10283				  CMP_CMP, CMN_CMP, CMP_CMN,
10284				  CMN_CMN};
10285    int swap =
10286      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10287
10288    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10289    if (TARGET_THUMB2) {
10290      output_asm_insn (ite[swap], operands);
10291    }
10292    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10293    return \"\";
10294  }
10295  "
10296  [(set_attr "conds" "set")
10297   (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10298   (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10299   (set_attr_alternative "length"
10300      [(const_int 6)
10301       (const_int 8)
10302       (const_int 8)
10303       (const_int 8)
10304       (const_int 8)
10305       (const_int 6)
10306       (if_then_else (eq_attr "is_thumb" "no")
10307           (const_int 8)
10308           (const_int 10))
10309       (if_then_else (eq_attr "is_thumb" "no")
10310           (const_int 8)
10311           (const_int 10))
10312       (if_then_else (eq_attr "is_thumb" "no")
10313           (const_int 8)
10314           (const_int 10))
10315       (if_then_else (eq_attr "is_thumb" "no")
10316           (const_int 8)
10317           (const_int 10))])
10318   (set_attr "type" "multiple")]
10319)
10320
10321(define_insn_and_split "*ior_scc_scc"
10322  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10323	(ior:SI (match_operator:SI 3 "arm_comparison_operator"
10324		 [(match_operand:SI 1 "s_register_operand" "l,r")
10325		  (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10326		(match_operator:SI 6 "arm_comparison_operator"
10327		 [(match_operand:SI 4 "s_register_operand" "l,r")
10328		  (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10329   (clobber (reg:CC CC_REGNUM))]
10330  "TARGET_32BIT
10331   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10332       != CCmode)"
10333  "#"
10334  "TARGET_32BIT && reload_completed"
10335  [(set (match_dup 7)
10336	(compare
10337	 (ior:SI
10338	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10339	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10340	 (const_int 0)))
10341   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10342  "operands[7]
10343     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10344						  DOM_CC_X_OR_Y),
10345		    CC_REGNUM);"
10346  [(set_attr "conds" "clob")
10347   (set_attr "enabled_for_short_it" "yes,no")
10348   (set_attr "length" "16")
10349   (set_attr "type" "multiple")]
10350)
10351
10352; If the above pattern is followed by a CMP insn, then the compare is
10353; redundant, since we can rework the conditional instruction that follows.
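;; For example (an illustrative C fragment, not taken from any testcase):
;;	int t = (a == 0) | (c == d);
;;	if (t) ...
;; wants the IOR of the two comparisons both as a value and as a branch
;; condition; the pattern below computes it once and leaves the condition
;; codes set, so no separate compare of the result against zero is needed.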
10354(define_insn_and_split "*ior_scc_scc_cmp"
10355  [(set (match_operand 0 "dominant_cc_register" "")
10356	(compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10357			  [(match_operand:SI 1 "s_register_operand" "l,r")
10358			   (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10359			 (match_operator:SI 6 "arm_comparison_operator"
10360			  [(match_operand:SI 4 "s_register_operand" "l,r")
10361			   (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10362		 (const_int 0)))
10363   (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10364	(ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10365		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10366  "TARGET_32BIT"
10367  "#"
10368  "TARGET_32BIT && reload_completed"
10369  [(set (match_dup 0)
10370	(compare
10371	 (ior:SI
10372	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10373	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10374	 (const_int 0)))
10375   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10376  ""
10377  [(set_attr "conds" "set")
10378   (set_attr "enabled_for_short_it" "yes,no")
10379   (set_attr "length" "16")
10380   (set_attr "type" "multiple")]
10381)
10382
10383(define_insn_and_split "*and_scc_scc"
10384  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10385	(and:SI (match_operator:SI 3 "arm_comparison_operator"
10386		 [(match_operand:SI 1 "s_register_operand" "l,r")
10387		  (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10388		(match_operator:SI 6 "arm_comparison_operator"
10389		 [(match_operand:SI 4 "s_register_operand" "l,r")
10390		  (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10391   (clobber (reg:CC CC_REGNUM))]
10392  "TARGET_32BIT
10393   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10394       != CCmode)"
10395  "#"
10396  "TARGET_32BIT && reload_completed
10397   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10398       != CCmode)"
10399  [(set (match_dup 7)
10400	(compare
10401	 (and:SI
10402	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10403	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10404	 (const_int 0)))
10405   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10406  "operands[7]
10407     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10408						  DOM_CC_X_AND_Y),
10409		    CC_REGNUM);"
10410  [(set_attr "conds" "clob")
10411   (set_attr "enabled_for_short_it" "yes,no")
10412   (set_attr "length" "16")
10413   (set_attr "type" "multiple")]
10414)
10415
10416; If the above pattern is followed by a CMP insn, then the compare is
10417; redundant, since we can rework the conditional instruction that follows.
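;; For example (again, only an illustrative C fragment):
;;	int t = (a == 0) & (c == d);
;;	if (t) ...
;; needs the AND of the two comparisons both as a value and as a condition.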
10418(define_insn_and_split "*and_scc_scc_cmp"
10419  [(set (match_operand 0 "dominant_cc_register" "")
10420	(compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10421			  [(match_operand:SI 1 "s_register_operand" "l,r")
10422			   (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10423			 (match_operator:SI 6 "arm_comparison_operator"
10424			  [(match_operand:SI 4 "s_register_operand" "l,r")
10425			   (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10426		 (const_int 0)))
10427   (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10428	(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10429		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10430  "TARGET_32BIT"
10431  "#"
10432  "TARGET_32BIT && reload_completed"
10433  [(set (match_dup 0)
10434	(compare
10435	 (and:SI
10436	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10437	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10438	 (const_int 0)))
10439   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10440  ""
10441  [(set_attr "conds" "set")
10442   (set_attr "enabled_for_short_it" "yes,no")
10443   (set_attr "length" "16")
10444   (set_attr "type" "multiple")]
10445)
10446
10447;; If there is no dominance in the comparison, then we can still save an
10448;; instruction in the AND case, since we know that the second comparison
10449;; need only zero the value when it is false (when it is true, the value
10450;; is already correct).
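;; Roughly (an illustrative sketch rather than the literal output), the split
;; below computes the first comparison into Rd as 0/1, then compares the
;; operands of the second, and finishes with a single conditional
;;	mov	Rd, #0
;; that executes only when the second comparison is false.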
10451(define_insn_and_split "*and_scc_scc_nodom"
10452  [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10453	(and:SI (match_operator:SI 3 "arm_comparison_operator"
10454		 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10455		  (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10456		(match_operator:SI 6 "arm_comparison_operator"
10457		 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10458		  (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10459   (clobber (reg:CC CC_REGNUM))]
10460  "TARGET_32BIT
10461   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10462       == CCmode)"
10463  "#"
10464  "TARGET_32BIT && reload_completed"
10465  [(parallel [(set (match_dup 0)
10466		   (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10467	      (clobber (reg:CC CC_REGNUM))])
10468   (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10469   (set (match_dup 0)
10470	(if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10471			 (match_dup 0)
10472			 (const_int 0)))]
10473  "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10474					      operands[4], operands[5]),
10475			      CC_REGNUM);
10476   operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10477				  operands[5]);"
10478  [(set_attr "conds" "clob")
10479   (set_attr "length" "20")
10480   (set_attr "type" "multiple")]
10481)
10482
10483(define_split
10484  [(set (reg:CC_NZ CC_REGNUM)
10485	(compare:CC_NZ (ior:SI
10486			  (and:SI (match_operand:SI 0 "s_register_operand" "")
10487				  (const_int 1))
10488			  (match_operator:SI 1 "arm_comparison_operator"
10489			   [(match_operand:SI 2 "s_register_operand" "")
10490			    (match_operand:SI 3 "arm_add_operand" "")]))
10491			 (const_int 0)))
10492   (clobber (match_operand:SI 4 "s_register_operand" ""))]
10493  "TARGET_ARM"
10494  [(set (match_dup 4)
10495	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10496		(match_dup 0)))
10497   (set (reg:CC_NZ CC_REGNUM)
10498	(compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10499			 (const_int 0)))]
10500  "")
10501
10502(define_split
10503  [(set (reg:CC_NZ CC_REGNUM)
10504	(compare:CC_NZ (ior:SI
10505			  (match_operator:SI 1 "arm_comparison_operator"
10506			   [(match_operand:SI 2 "s_register_operand" "")
10507			    (match_operand:SI 3 "arm_add_operand" "")])
10508			  (and:SI (match_operand:SI 0 "s_register_operand" "")
10509				  (const_int 1)))
10510			 (const_int 0)))
10511   (clobber (match_operand:SI 4 "s_register_operand" ""))]
10512  "TARGET_ARM"
10513  [(set (match_dup 4)
10514	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10515		(match_dup 0)))
10516   (set (reg:CC_NZ CC_REGNUM)
10517	(compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10518			 (const_int 0)))]
10519  "")
10520;; ??? The conditional patterns above need checking for Thumb-2 usefulness.
10521
10522(define_insn_and_split "*negscc"
10523  [(set (match_operand:SI 0 "s_register_operand" "=r")
10524	(neg:SI (match_operator 3 "arm_comparison_operator"
10525		 [(match_operand:SI 1 "s_register_operand" "r")
10526		  (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10527   (clobber (reg:CC CC_REGNUM))]
10528  "TARGET_ARM"
10529  "#"
10530  "&& reload_completed"
10531  [(const_int 0)]
10532  {
10533    rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10534
10535    if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10536       {
10537         /* Emit mov\\t%0, %1, asr #31 */
10538         emit_insn (gen_rtx_SET (operands[0],
10539                                 gen_rtx_ASHIFTRT (SImode,
10540                                                   operands[1],
10541                                                   GEN_INT (31))));
10542         DONE;
10543       }
10544     else if (GET_CODE (operands[3]) == NE)
10545       {
10546        /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10547        if (CONST_INT_P (operands[2]))
10548          emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10549                                        gen_int_mode (-INTVAL (operands[2]),
10550						      SImode)));
10551        else
10552          emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10553
10554        emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10555                                      gen_rtx_NE (SImode,
10556                                                  cc_reg,
10557                                                  const0_rtx),
10558                                      gen_rtx_SET (operands[0],
10559                                                   GEN_INT (~0))));
10560        DONE;
10561      }
10562    else
10563      {
10564        /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10565        emit_insn (gen_rtx_SET (cc_reg,
10566                                gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10567        enum rtx_code rc = GET_CODE (operands[3]);
10568
10569        rc = reverse_condition (rc);
10570        emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10571                                      gen_rtx_fmt_ee (rc,
10572                                                      VOIDmode,
10573                                                      cc_reg,
10574                                                      const0_rtx),
10575                                      gen_rtx_SET (operands[0], const0_rtx)));
10576        rc = GET_CODE (operands[3]);
10577        emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10578                                      gen_rtx_fmt_ee (rc,
10579                                                      VOIDmode,
10580                                                      cc_reg,
10581                                                      const0_rtx),
10582                                      gen_rtx_SET (operands[0],
10583                                                   GEN_INT (~0))));
10584        DONE;
10585      }
10586     FAIL;
10587  }
10588  [(set_attr "conds" "clob")
10589   (set_attr "length" "12")
10590   (set_attr "type" "multiple")]
10591)
10592
10593(define_insn_and_split "movcond_addsi"
10594  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10595	(if_then_else:SI
10596	 (match_operator 5 "comparison_operator"
10597	  [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10598	            (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10599            (const_int 0)])
10600	 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10601	 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10602   (clobber (reg:CC CC_REGNUM))]
10603   "TARGET_32BIT"
10604   "#"
10605   "&& reload_completed"
10606  [(set (reg:CC_NZ CC_REGNUM)
10607	(compare:CC_NZ
10608	 (plus:SI (match_dup 3)
10609		  (match_dup 4))
10610	 (const_int 0)))
10611   (set (match_dup 0) (match_dup 1))
10612   (cond_exec (match_dup 6)
10613	      (set (match_dup 0) (match_dup 2)))]
10614  "
10615  {
10616    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10617					     operands[3], operands[4]);
10618    enum rtx_code rc = GET_CODE (operands[5]);
10619    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10620    gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10621    if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10622      rc = reverse_condition (rc);
10623    else
10624      std::swap (operands[1], operands[2]);
10625
10626    operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10627  }
10628  "
10629  [(set_attr "conds" "clob")
10630   (set_attr "enabled_for_short_it" "no,yes,yes")
10631   (set_attr "type" "multiple")]
10632)
10633
10634(define_insn "movcond"
10635  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10636	(if_then_else:SI
10637	 (match_operator 5 "arm_comparison_operator"
10638	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
10639	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10640	 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10641	 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10642   (clobber (reg:CC CC_REGNUM))]
10643  "TARGET_ARM"
10644  "*
10645  if (GET_CODE (operands[5]) == LT
10646      && (operands[4] == const0_rtx))
10647    {
10648      if (which_alternative != 1 && REG_P (operands[1]))
10649	{
10650	  if (operands[2] == const0_rtx)
10651	    return \"and\\t%0, %1, %3, asr #31\";
10652	  return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10653	}
10654      else if (which_alternative != 0 && REG_P (operands[2]))
10655	{
10656	  if (operands[1] == const0_rtx)
10657	    return \"bic\\t%0, %2, %3, asr #31\";
10658	  return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10659	}
10660      /* The only case that falls through to here is when both ops 1 & 2
10661	 are constants.  */
10662    }
10663
10664  if (GET_CODE (operands[5]) == GE
10665      && (operands[4] == const0_rtx))
10666    {
10667      if (which_alternative != 1 && REG_P (operands[1]))
10668	{
10669	  if (operands[2] == const0_rtx)
10670	    return \"bic\\t%0, %1, %3, asr #31\";
10671	  return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10672	}
10673      else if (which_alternative != 0 && REG_P (operands[2]))
10674	{
10675	  if (operands[1] == const0_rtx)
10676	    return \"and\\t%0, %2, %3, asr #31\";
10677	  return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10678	}
10679      /* The only case that falls through to here is when both ops 1 & 2
10680	 are constants.  */
10681    }
10682  if (CONST_INT_P (operands[4])
10683      && !const_ok_for_arm (INTVAL (operands[4])))
10684    output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10685  else
10686    output_asm_insn (\"cmp\\t%3, %4\", operands);
10687  if (which_alternative != 0)
10688    output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10689  if (which_alternative != 1)
10690    output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10691  return \"\";
10692  "
10693  [(set_attr "conds" "clob")
10694   (set_attr "length" "8,8,12")
10695   (set_attr "type" "multiple")]
10696)
10697
10698;; ??? The patterns below need checking for Thumb-2 usefulness.
10699
10700(define_insn "*ifcompare_plus_move"
10701  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10702	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10703			  [(match_operand:SI 4 "s_register_operand" "r,r")
10704			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10705			 (plus:SI
10706			  (match_operand:SI 2 "s_register_operand" "r,r")
10707			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10708			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10709   (clobber (reg:CC CC_REGNUM))]
10710  "TARGET_ARM"
10711  "#"
10712  [(set_attr "conds" "clob")
10713   (set_attr "length" "8,12")
10714   (set_attr "type" "multiple")]
10715)
10716
10717(define_insn "*if_plus_move"
10718  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10719	(if_then_else:SI
10720	 (match_operator 4 "arm_comparison_operator"
10721	  [(match_operand 5 "cc_register" "") (const_int 0)])
10722	 (plus:SI
10723	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10724	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10725	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10726  "TARGET_ARM"
10727  "@
10728   add%d4\\t%0, %2, %3
10729   sub%d4\\t%0, %2, #%n3
10730   add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10731   sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10732  [(set_attr "conds" "use")
10733   (set_attr "length" "4,4,8,8")
10734   (set_attr_alternative "type"
10735                         [(if_then_else (match_operand 3 "const_int_operand" "")
10736                                        (const_string "alu_imm" )
10737                                        (const_string "alu_sreg"))
10738                          (const_string "alu_imm")
10739                          (const_string "multiple")
10740                          (const_string "multiple")])]
10741)
10742
10743(define_insn "*ifcompare_move_plus"
10744  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10745	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10746			  [(match_operand:SI 4 "s_register_operand" "r,r")
10747			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10748			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10749			 (plus:SI
10750			  (match_operand:SI 2 "s_register_operand" "r,r")
10751			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10752   (clobber (reg:CC CC_REGNUM))]
10753  "TARGET_ARM"
10754  "#"
10755  [(set_attr "conds" "clob")
10756   (set_attr "length" "8,12")
10757   (set_attr "type" "multiple")]
10758)
10759
10760(define_insn "*if_move_plus"
10761  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10762	(if_then_else:SI
10763	 (match_operator 4 "arm_comparison_operator"
10764	  [(match_operand 5 "cc_register" "") (const_int 0)])
10765	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10766	 (plus:SI
10767	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10768	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10769  "TARGET_ARM"
10770  "@
10771   add%D4\\t%0, %2, %3
10772   sub%D4\\t%0, %2, #%n3
10773   add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10774   sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10775  [(set_attr "conds" "use")
10776   (set_attr "length" "4,4,8,8")
10777   (set_attr_alternative "type"
10778                         [(if_then_else (match_operand 3 "const_int_operand" "")
10779                                        (const_string "alu_imm" )
10780                                        (const_string "alu_sreg"))
10781                          (const_string "alu_imm")
10782                          (const_string "multiple")
10783                          (const_string "multiple")])]
10784)
10785
10786(define_insn "*ifcompare_arith_arith"
10787  [(set (match_operand:SI 0 "s_register_operand" "=r")
10788	(if_then_else:SI (match_operator 9 "arm_comparison_operator"
10789			  [(match_operand:SI 5 "s_register_operand" "r")
10790			   (match_operand:SI 6 "arm_add_operand" "rIL")])
10791			 (match_operator:SI 8 "shiftable_operator"
10792			  [(match_operand:SI 1 "s_register_operand" "r")
10793			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
10794			 (match_operator:SI 7 "shiftable_operator"
10795			  [(match_operand:SI 3 "s_register_operand" "r")
10796			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10797   (clobber (reg:CC CC_REGNUM))]
10798  "TARGET_ARM"
10799  "#"
10800  [(set_attr "conds" "clob")
10801   (set_attr "length" "12")
10802   (set_attr "type" "multiple")]
10803)
10804
10805(define_insn "*if_arith_arith"
10806  [(set (match_operand:SI 0 "s_register_operand" "=r")
10807	(if_then_else:SI (match_operator 5 "arm_comparison_operator"
10808			  [(match_operand 8 "cc_register" "") (const_int 0)])
10809			 (match_operator:SI 6 "shiftable_operator"
10810			  [(match_operand:SI 1 "s_register_operand" "r")
10811			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
10812			 (match_operator:SI 7 "shiftable_operator"
10813			  [(match_operand:SI 3 "s_register_operand" "r")
10814			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10815  "TARGET_ARM"
10816  "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10817  [(set_attr "conds" "use")
10818   (set_attr "length" "8")
10819   (set_attr "type" "multiple")]
10820)
10821
10822(define_insn "*ifcompare_arith_move"
10823  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10824	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10825			  [(match_operand:SI 2 "s_register_operand" "r,r")
10826			   (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10827			 (match_operator:SI 7 "shiftable_operator"
10828			  [(match_operand:SI 4 "s_register_operand" "r,r")
10829			   (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10830			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10831   (clobber (reg:CC CC_REGNUM))]
10832  "TARGET_ARM"
10833  "*
10834  /* If we have an operation where (op x 0) is the identity operation, the
10835     conditional operator is LT or GE, we are comparing against zero, and
10836     everything is in registers, then we can do this in two instructions.  */
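  /* Illustrative sketch (not emitted verbatim): for the LT case the two
     instructions are an AND of %5 with (%2 asr 31) followed by the %I7
     operation, so %0 ends up as %4 when %2 >= 0 (the mask is zero and
     (op x 0) is x) and as %4 op %5 when %2 < 0.  */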
10837  if (operands[3] == const0_rtx
10838      && GET_CODE (operands[7]) != AND
10839      && REG_P (operands[5])
10840      && REG_P (operands[1])
10841      && REGNO (operands[1]) == REGNO (operands[4])
10842      && REGNO (operands[4]) != REGNO (operands[0]))
10843    {
10844      if (GET_CODE (operands[6]) == LT)
10845	return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10846      else if (GET_CODE (operands[6]) == GE)
10847	return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10848    }
10849  if (CONST_INT_P (operands[3])
10850      && !const_ok_for_arm (INTVAL (operands[3])))
10851    output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10852  else
10853    output_asm_insn (\"cmp\\t%2, %3\", operands);
10854  output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10855  if (which_alternative != 0)
10856    return \"mov%D6\\t%0, %1\";
10857  return \"\";
10858  "
10859  [(set_attr "conds" "clob")
10860   (set_attr "length" "8,12")
10861   (set_attr "type" "multiple")]
10862)
10863
10864(define_insn "*if_arith_move"
10865  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10866	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
10867			  [(match_operand 6 "cc_register" "") (const_int 0)])
10868			 (match_operator:SI 5 "shiftable_operator"
10869			  [(match_operand:SI 2 "s_register_operand" "r,r")
10870			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10871			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10872  "TARGET_ARM"
10873  "@
10874   %I5%d4\\t%0, %2, %3
10875   %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10876  [(set_attr "conds" "use")
10877   (set_attr "length" "4,8")
10878   (set_attr_alternative "type"
10879                         [(if_then_else (match_operand 3 "const_int_operand" "")
10880                                (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10881                                              (const_string "alu_shift_imm_lsl_1to4")
10882                                              (const_string "alu_shift_imm_other"))
10883                                        (const_string "alu_shift_reg"))
10884                          (const_string "multiple")])]
10885)
10886
10887(define_insn "*ifcompare_move_arith"
10888  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10889	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10890			  [(match_operand:SI 4 "s_register_operand" "r,r")
10891			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10892			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10893			 (match_operator:SI 7 "shiftable_operator"
10894			  [(match_operand:SI 2 "s_register_operand" "r,r")
10895			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10896   (clobber (reg:CC CC_REGNUM))]
10897  "TARGET_ARM"
10898  "*
10899  /* If we have an operation where (op x 0) is the identity operation, the
10900     conditional operator is LT or GE, we are comparing against zero, and
10901     everything is in registers, then we can do this in two instructions.  */
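  /* Illustrative sketch (not emitted verbatim): here the arithmetic sits in
     the false arm of the if_then_else, so for GE the mask (%4 asr 31) is
     non-zero exactly when the comparison fails, leaving %2 when %4 >= 0 and
     %2 op %3 when %4 < 0.  */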
10902  if (operands[5] == const0_rtx
10903      && GET_CODE (operands[7]) != AND
10904      && REG_P (operands[3])
10905      && REG_P (operands[1])
10906      && REGNO (operands[1]) == REGNO (operands[2])
10907      && REGNO (operands[2]) != REGNO (operands[0]))
10908    {
10909      if (GET_CODE (operands[6]) == GE)
10910	return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10911      else if (GET_CODE (operands[6]) == LT)
10912	return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10913    }
10914
10915  if (CONST_INT_P (operands[5])
10916      && !const_ok_for_arm (INTVAL (operands[5])))
10917    output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10918  else
10919    output_asm_insn (\"cmp\\t%4, %5\", operands);
10920
10921  if (which_alternative != 0)
10922    output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10923  return \"%I7%D6\\t%0, %2, %3\";
10924  "
10925  [(set_attr "conds" "clob")
10926   (set_attr "length" "8,12")
10927   (set_attr "type" "multiple")]
10928)
10929
10930(define_insn "*if_move_arith"
10931  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10932	(if_then_else:SI
10933	 (match_operator 4 "arm_comparison_operator"
10934	  [(match_operand 6 "cc_register" "") (const_int 0)])
10935	 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10936	 (match_operator:SI 5 "shiftable_operator"
10937	  [(match_operand:SI 2 "s_register_operand" "r,r")
10938	   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10939  "TARGET_ARM"
10940  "@
10941   %I5%D4\\t%0, %2, %3
10942   %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10943  [(set_attr "conds" "use")
10944   (set_attr "length" "4,8")
10945   (set_attr_alternative "type"
10946                         [(if_then_else (match_operand 3 "const_int_operand" "")
10947                                (if_then_else (match_operand 5 "alu_shift_operator_lsl_1_to_4")
10948                                              (const_string "alu_shift_imm_lsl_1to4")
10949                                              (const_string "alu_shift_imm_other"))
10950                                        (const_string "alu_shift_reg"))
10951                          (const_string "multiple")])]
10952)
10953
10954(define_insn "*ifcompare_move_not"
10955  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10956	(if_then_else:SI
10957	 (match_operator 5 "arm_comparison_operator"
10958	  [(match_operand:SI 3 "s_register_operand" "r,r")
10959	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10960	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10961	 (not:SI
10962	  (match_operand:SI 2 "s_register_operand" "r,r"))))
10963   (clobber (reg:CC CC_REGNUM))]
10964  "TARGET_ARM"
10965  "#"
10966  [(set_attr "conds" "clob")
10967   (set_attr "length" "8,12")
10968   (set_attr "type" "multiple")]
10969)
10970
10971(define_insn "*if_move_not"
10972  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10973	(if_then_else:SI
10974	 (match_operator 4 "arm_comparison_operator"
10975	  [(match_operand 3 "cc_register" "") (const_int 0)])
10976	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10977	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10978  "TARGET_ARM"
10979  "@
10980   mvn%D4\\t%0, %2
10981   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10982   mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10983  [(set_attr "conds" "use")
10984   (set_attr "type" "mvn_reg,multiple,multiple")
10985   (set_attr "length" "4,8,8")]
10987)
10988
10989(define_insn "*ifcompare_not_move"
10990  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10991	(if_then_else:SI
10992	 (match_operator 5 "arm_comparison_operator"
10993	  [(match_operand:SI 3 "s_register_operand" "r,r")
10994	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10995	 (not:SI
10996	  (match_operand:SI 2 "s_register_operand" "r,r"))
10997	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10998   (clobber (reg:CC CC_REGNUM))]
10999  "TARGET_ARM"
11000  "#"
11001  [(set_attr "conds" "clob")
11002   (set_attr "length" "8,12")
11003   (set_attr "type" "multiple")]
11004)
11005
11006(define_insn "*if_not_move"
11007  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11008	(if_then_else:SI
11009	 (match_operator 4 "arm_comparison_operator"
11010	  [(match_operand 3 "cc_register" "") (const_int 0)])
11011	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
11012	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11013  "TARGET_ARM"
11014  "@
11015   mvn%d4\\t%0, %2
11016   mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11017   mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11018  [(set_attr "conds" "use")
11019   (set_attr "type" "mvn_reg,multiple,multiple")
11020   (set_attr "length" "4,8,8")]
11021)
11022
11023(define_insn "*ifcompare_shift_move"
11024  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11025	(if_then_else:SI
11026	 (match_operator 6 "arm_comparison_operator"
11027	  [(match_operand:SI 4 "s_register_operand" "r,r")
11028	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11029	 (match_operator:SI 7 "shift_operator"
11030	  [(match_operand:SI 2 "s_register_operand" "r,r")
11031	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11032	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11033   (clobber (reg:CC CC_REGNUM))]
11034  "TARGET_ARM"
11035  "#"
11036  [(set_attr "conds" "clob")
11037   (set_attr "length" "8,12")
11038   (set_attr "type" "multiple")]
11039)
11040
11041(define_insn "*if_shift_move"
11042  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11043	(if_then_else:SI
11044	 (match_operator 5 "arm_comparison_operator"
11045	  [(match_operand 6 "cc_register" "") (const_int 0)])
11046	 (match_operator:SI 4 "shift_operator"
11047	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
11048	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11049	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11050  "TARGET_ARM"
11051  "@
11052   mov%d5\\t%0, %2%S4
11053   mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11054   mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11055  [(set_attr "conds" "use")
11056   (set_attr "shift" "2")
11057   (set_attr "length" "4,8,8")
11058   (set_attr_alternative "type"
11059                         [(if_then_else (match_operand 3 "const_int_operand" "")
11060                                        (const_string "mov_shift" )
11061                                        (const_string "mov_shift_reg"))
11062                          (const_string "multiple")
11063                          (const_string "multiple")])]
11064)
11065
11066(define_insn "*ifcompare_move_shift"
11067  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11068	(if_then_else:SI
11069	 (match_operator 6 "arm_comparison_operator"
11070	  [(match_operand:SI 4 "s_register_operand" "r,r")
11071	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11072	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11073	 (match_operator:SI 7 "shift_operator"
11074	  [(match_operand:SI 2 "s_register_operand" "r,r")
11075	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11076   (clobber (reg:CC CC_REGNUM))]
11077  "TARGET_ARM"
11078  "#"
11079  [(set_attr "conds" "clob")
11080   (set_attr "length" "8,12")
11081   (set_attr "type" "multiple")]
11082)
11083
11084(define_insn "*if_move_shift"
11085  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11086	(if_then_else:SI
11087	 (match_operator 5 "arm_comparison_operator"
11088	  [(match_operand 6 "cc_register" "") (const_int 0)])
11089	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11090	 (match_operator:SI 4 "shift_operator"
11091	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
11092	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11093  "TARGET_ARM"
11094  "@
11095   mov%D5\\t%0, %2%S4
11096   mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11097   mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11098  [(set_attr "conds" "use")
11099   (set_attr "shift" "2")
11100   (set_attr "length" "4,8,8")
11101   (set_attr_alternative "type"
11102                         [(if_then_else (match_operand 3 "const_int_operand" "")
11103                                        (const_string "mov_shift" )
11104                                        (const_string "mov_shift_reg"))
11105                          (const_string "multiple")
11106                          (const_string "multiple")])]
11107)
11108
11109(define_insn "*ifcompare_shift_shift"
11110  [(set (match_operand:SI 0 "s_register_operand" "=r")
11111	(if_then_else:SI
11112	 (match_operator 7 "arm_comparison_operator"
11113	  [(match_operand:SI 5 "s_register_operand" "r")
11114	   (match_operand:SI 6 "arm_add_operand" "rIL")])
11115	 (match_operator:SI 8 "shift_operator"
11116	  [(match_operand:SI 1 "s_register_operand" "r")
11117	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
11118	 (match_operator:SI 9 "shift_operator"
11119	  [(match_operand:SI 3 "s_register_operand" "r")
11120	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11121   (clobber (reg:CC CC_REGNUM))]
11122  "TARGET_ARM"
11123  "#"
11124  [(set_attr "conds" "clob")
11125   (set_attr "length" "12")
11126   (set_attr "type" "multiple")]
11127)
11128
11129(define_insn "*if_shift_shift"
11130  [(set (match_operand:SI 0 "s_register_operand" "=r")
11131	(if_then_else:SI
11132	 (match_operator 5 "arm_comparison_operator"
11133	  [(match_operand 8 "cc_register" "") (const_int 0)])
11134	 (match_operator:SI 6 "shift_operator"
11135	  [(match_operand:SI 1 "s_register_operand" "r")
11136	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
11137	 (match_operator:SI 7 "shift_operator"
11138	  [(match_operand:SI 3 "s_register_operand" "r")
11139	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11140  "TARGET_ARM"
11141  "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11142  [(set_attr "conds" "use")
11143   (set_attr "shift" "1")
11144   (set_attr "length" "8")
11145   (set (attr "type") (if_then_else
11146		        (and (match_operand 2 "const_int_operand" "")
11147                             (match_operand 4 "const_int_operand" ""))
11148		      (const_string "mov_shift")
11149		      (const_string "mov_shift_reg")))]
11150)
11151
11152(define_insn "*ifcompare_not_arith"
11153  [(set (match_operand:SI 0 "s_register_operand" "=r")
11154	(if_then_else:SI
11155	 (match_operator 6 "arm_comparison_operator"
11156	  [(match_operand:SI 4 "s_register_operand" "r")
11157	   (match_operand:SI 5 "arm_add_operand" "rIL")])
11158	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11159	 (match_operator:SI 7 "shiftable_operator"
11160	  [(match_operand:SI 2 "s_register_operand" "r")
11161	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11162   (clobber (reg:CC CC_REGNUM))]
11163  "TARGET_ARM"
11164  "#"
11165  [(set_attr "conds" "clob")
11166   (set_attr "length" "12")
11167   (set_attr "type" "multiple")]
11168)
11169
11170(define_insn "*if_not_arith"
11171  [(set (match_operand:SI 0 "s_register_operand" "=r")
11172	(if_then_else:SI
11173	 (match_operator 5 "arm_comparison_operator"
11174	  [(match_operand 4 "cc_register" "") (const_int 0)])
11175	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11176	 (match_operator:SI 6 "shiftable_operator"
11177	  [(match_operand:SI 2 "s_register_operand" "r")
11178	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11179  "TARGET_ARM"
11180  "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11181  [(set_attr "conds" "use")
11182   (set_attr "type" "mvn_reg")
11183   (set_attr "length" "8")]
11184)
11185
11186(define_insn "*ifcompare_arith_not"
11187  [(set (match_operand:SI 0 "s_register_operand" "=r")
11188	(if_then_else:SI
11189	 (match_operator 6 "arm_comparison_operator"
11190	  [(match_operand:SI 4 "s_register_operand" "r")
11191	   (match_operand:SI 5 "arm_add_operand" "rIL")])
11192	 (match_operator:SI 7 "shiftable_operator"
11193	  [(match_operand:SI 2 "s_register_operand" "r")
11194	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
11195	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11196   (clobber (reg:CC CC_REGNUM))]
11197  "TARGET_ARM"
11198  "#"
11199  [(set_attr "conds" "clob")
11200   (set_attr "length" "12")
11201   (set_attr "type" "multiple")]
11202)
11203
11204(define_insn "*if_arith_not"
11205  [(set (match_operand:SI 0 "s_register_operand" "=r")
11206	(if_then_else:SI
11207	 (match_operator 5 "arm_comparison_operator"
11208	  [(match_operand 4 "cc_register" "") (const_int 0)])
11209	 (match_operator:SI 6 "shiftable_operator"
11210	  [(match_operand:SI 2 "s_register_operand" "r")
11211	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
11212	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11213  "TARGET_ARM"
11214  "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11215  [(set_attr "conds" "use")
11216   (set_attr "type" "multiple")
11217   (set_attr "length" "8")]
11218)
11219
11220(define_insn "*ifcompare_neg_move"
11221  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11222	(if_then_else:SI
11223	 (match_operator 5 "arm_comparison_operator"
11224	  [(match_operand:SI 3 "s_register_operand" "r,r")
11225	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11226	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11227	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11228   (clobber (reg:CC CC_REGNUM))]
11229  "TARGET_ARM"
11230  "#"
11231  [(set_attr "conds" "clob")
11232   (set_attr "length" "8,12")
11233   (set_attr "type" "multiple")]
11234)
11235
11236(define_insn_and_split "*if_neg_move"
11237  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11238	(if_then_else:SI
11239	 (match_operator 4 "arm_comparison_operator"
11240	  [(match_operand 3 "cc_register" "") (const_int 0)])
11241	 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11242	 (match_operand:SI 1 "s_register_operand" "0,0")))]
11243  "TARGET_32BIT && !TARGET_COND_ARITH"
11244  "#"
11245  "&& reload_completed"
11246  [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11247	      (set (match_dup 0) (neg:SI (match_dup 2))))]
11248  ""
11249  [(set_attr "conds" "use")
11250   (set_attr "length" "4")
11251   (set_attr "arch" "t2,32")
11252   (set_attr "enabled_for_short_it" "yes,no")
11253   (set_attr "type" "logic_shift_imm")]
11254)
11255
11256(define_insn "*ifcompare_move_neg"
11257  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11258	(if_then_else:SI
11259	 (match_operator 5 "arm_comparison_operator"
11260	  [(match_operand:SI 3 "s_register_operand" "r,r")
11261	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11262	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11263	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11264   (clobber (reg:CC CC_REGNUM))]
11265  "TARGET_ARM"
11266  "#"
11267  [(set_attr "conds" "clob")
11268   (set_attr "length" "8,12")
11269   (set_attr "type" "multiple")]
11270)
11271
11272(define_insn_and_split "*if_move_neg"
11273  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11274	(if_then_else:SI
11275	 (match_operator 4 "arm_comparison_operator"
11276	  [(match_operand 3 "cc_register" "") (const_int 0)])
11277	 (match_operand:SI 1 "s_register_operand" "0,0")
11278	 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11279  "TARGET_32BIT"
11280  "#"
11281  "&& reload_completed"
11282  [(cond_exec (match_dup 5)
11283	      (set (match_dup 0) (neg:SI (match_dup 2))))]
11284  {
11285    machine_mode mode = GET_MODE (operands[3]);
11286    rtx_code rc = GET_CODE (operands[4]);
11287
11288    if (mode == CCFPmode || mode == CCFPEmode)
11289      rc = reverse_condition_maybe_unordered (rc);
11290    else
11291      rc = reverse_condition (rc);
11292
11293    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11294  }
11295  [(set_attr "conds" "use")
11296   (set_attr "length" "4")
11297   (set_attr "arch" "t2,32")
11298   (set_attr "enabled_for_short_it" "yes,no")
11299   (set_attr "type" "logic_shift_imm")]
11300)
11301
11302(define_insn "*arith_adjacentmem"
11303  [(set (match_operand:SI 0 "s_register_operand" "=r")
11304	(match_operator:SI 1 "shiftable_operator"
11305	 [(match_operand:SI 2 "memory_operand" "m")
11306	  (match_operand:SI 3 "memory_operand" "m")]))
11307   (clobber (match_scratch:SI 4 "=r"))]
11308  "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11309  "*
11310  {
11311    rtx ldm[3];
11312    rtx arith[4];
11313    rtx base_reg;
11314    HOST_WIDE_INT val1 = 0, val2 = 0;
11315
11316    if (REGNO (operands[0]) > REGNO (operands[4]))
11317      {
11318	ldm[1] = operands[4];
11319	ldm[2] = operands[0];
11320      }
11321    else
11322      {
11323	ldm[1] = operands[0];
11324	ldm[2] = operands[4];
11325      }
11326
11327    base_reg = XEXP (operands[2], 0);
11328
11329    if (!REG_P (base_reg))
11330      {
11331	val1 = INTVAL (XEXP (base_reg, 1));
11332	base_reg = XEXP (base_reg, 0);
11333      }
11334
11335    if (!REG_P (XEXP (operands[3], 0)))
11336      val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11337
11338    arith[0] = operands[0];
11339    arith[3] = operands[1];
11340
11341    if (val1 < val2)
11342      {
11343	arith[1] = ldm[1];
11344	arith[2] = ldm[2];
11345      }
11346    else
11347      {
11348	arith[1] = ldm[2];
11349	arith[2] = ldm[1];
11350      }
11351
11352    ldm[0] = base_reg;
    if (val1 != 0 && val2 != 0)
11354      {
11355	rtx ops[3];
11356
11357	if (val1 == 4 || val2 == 4)
11358	  /* Other val must be 8, since we know they are adjacent and neither
11359	     is zero.  */
11360	  output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11361	else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11362	  {
11363	    ldm[0] = ops[0] = operands[4];
11364	    ops[1] = base_reg;
11365	    ops[2] = GEN_INT (val1);
11366	    output_add_immediate (ops);
11367	    if (val1 < val2)
11368	      output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11369	    else
11370	      output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11371	  }
11372	else
11373	  {
11374	    /* Offset is out of range for a single add, so use two ldr.  */
11375	    ops[0] = ldm[1];
11376	    ops[1] = base_reg;
11377	    ops[2] = GEN_INT (val1);
11378	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11379	    ops[0] = ldm[2];
11380	    ops[2] = GEN_INT (val2);
11381	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11382	  }
11383      }
11384    else if (val1 != 0)
11385      {
11386	if (val1 < val2)
11387	  output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11388	else
11389	  output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11390      }
11391    else
11392      {
11393	if (val1 < val2)
11394	  output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11395	else
11396	  output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11397      }
11398    output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11399    return \"\";
11400  }"
11401  [(set_attr "length" "12")
11402   (set_attr "predicable" "yes")
11403   (set_attr "type" "load_4")]
11404)
11405
11406; This pattern is never tried by combine, so do it as a peephole
11407
11408(define_peephole2
11409  [(set (match_operand:SI 0 "arm_general_register_operand" "")
11410	(match_operand:SI 1 "arm_general_register_operand" ""))
11411   (set (reg:CC CC_REGNUM)
11412	(compare:CC (match_dup 1) (const_int 0)))]
11413  "TARGET_ARM"
11414  [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11415	      (set (match_dup 0) (match_dup 1))])]
11416  ""
11417)
11418
11419(define_split
11420  [(set (match_operand:SI 0 "s_register_operand" "")
11421	(and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11422		       (const_int 0))
11423		(neg:SI (match_operator:SI 2 "arm_comparison_operator"
11424			 [(match_operand:SI 3 "s_register_operand" "")
11425			  (match_operand:SI 4 "arm_rhs_operand" "")]))))
11426   (clobber (match_operand:SI 5 "s_register_operand" ""))]
11427  "TARGET_ARM"
11428  [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11429   (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11430			      (match_dup 5)))]
11431  ""
11432)
11433
11434;; This split can be used because CC_Z mode implies that the following
11435;; branch will be an equality, or an unsigned inequality, so the sign
11436;; extension is not needed.
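;; For illustration, a comparison such as
;;   (compare:CC_Z (ashift:SI (subreg:SI (mem:QI ...) 0) (const_int 24))
;;                 (const_int 0x2a000000))
;; becomes a zero-extending byte load followed by a compare against #0x2a,
;; which is valid because only the low 8 bits take part in the (unsigned)
;; equality test.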
11437
11438(define_split
11439  [(set (reg:CC_Z CC_REGNUM)
11440	(compare:CC_Z
11441	 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11442		    (const_int 24))
11443	 (match_operand 1 "const_int_operand" "")))
11444   (clobber (match_scratch:SI 2 ""))]
11445  "TARGET_ARM
11446   && ((UINTVAL (operands[1]))
11447       == ((UINTVAL (operands[1])) >> 24) << 24)"
11448  [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11449   (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11450  "
11451  operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11452  "
11453)
11454;; ??? Check the patterns above for Thumb-2 usefulness
11455
11456(define_expand "prologue"
11457  [(clobber (const_int 0))]
11458  "TARGET_EITHER"
11459  "if (TARGET_32BIT)
11460     arm_expand_prologue ();
11461   else
11462     thumb1_expand_prologue ();
11463  DONE;
11464  "
11465)
11466
11467(define_expand "epilogue"
11468  [(clobber (const_int 0))]
11469  "TARGET_EITHER"
11470  "
11471  if (crtl->calls_eh_return)
11472    emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11473  if (TARGET_THUMB1)
11474   {
11475     thumb1_expand_epilogue ();
11476     emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11477                     gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11478   }
11479  else if (HAVE_return)
11480   {
11481     /* HAVE_return is testing for USE_RETURN_INSN (FALSE).  Hence,
11482        no need for explicit testing again.  */
11483     emit_jump_insn (gen_return ());
11484   }
11485  else if (TARGET_32BIT)
11486   {
11487    arm_expand_epilogue (true);
11488   }
11489  DONE;
11490  "
11491)
11492
;; Note - although unspec_volatiles USE all hard registers,
;; USEs are ignored after reload has completed.  Thus we need
;; to add an unspec of the link register to ensure that flow
;; does not think that it is unused by the sibcall branch that
;; will replace the standard function epilogue.
11498(define_expand "sibcall_epilogue"
11499   [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11500               (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11501   "TARGET_32BIT"
11502   "
11503   arm_expand_epilogue (false);
11504   DONE;
11505   "
11506)
11507
11508(define_expand "eh_epilogue"
11509  [(use (match_operand:SI 0 "register_operand"))
11510   (use (match_operand:SI 1 "register_operand"))
11511   (use (match_operand:SI 2 "register_operand"))]
11512  "TARGET_EITHER"
11513  "
11514  {
11515    cfun->machine->eh_epilogue_sp_ofs = operands[1];
11516    if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11517      {
11518	rtx ra = gen_rtx_REG (Pmode, 2);
11519
11520	emit_move_insn (ra, operands[2]);
11521	operands[2] = ra;
11522      }
    /* This is a hack -- we may have crystallized the function type too
       early.  */
11525    cfun->machine->func_type = 0;
11526  }"
11527)
11528
;; These splits are only used during output to reduce the number of patterns
;; that need assembler instructions adding to them.  We allowed the setting
;; of the conditions to be implicit during rtl generation so that
;; the conditional compare patterns would work.  However, this conflicts to
;; some extent with the conditional data operations, so we have to split them
;; up again here.
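;; For example, an insn of the form
;;   (set r0 (if_then_else (lt r1 r2) r0 r3))  [with a CC clobber]
;; is split (by the first pattern below) into a compare that sets the
;; condition-code register, followed by a cond_exec move of r3 into r0
;; under the reversed (GE) condition.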
11535
11536;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
11537;; conditional execution sufficient?
11538
11539(define_split
11540  [(set (match_operand:SI 0 "s_register_operand" "")
11541	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11542			  [(match_operand 2 "" "") (match_operand 3 "" "")])
11543			 (match_dup 0)
11544			 (match_operand 4 "" "")))
11545   (clobber (reg:CC CC_REGNUM))]
11546  "TARGET_ARM && reload_completed"
11547  [(set (match_dup 5) (match_dup 6))
11548   (cond_exec (match_dup 7)
11549	      (set (match_dup 0) (match_dup 4)))]
11550  "
11551  {
11552    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11553					     operands[2], operands[3]);
11554    enum rtx_code rc = GET_CODE (operands[1]);
11555
11556    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11557    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11558    if (mode == CCFPmode || mode == CCFPEmode)
11559      rc = reverse_condition_maybe_unordered (rc);
11560    else
11561      rc = reverse_condition (rc);
11562
11563    operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11564  }"
11565)
11566
11567(define_split
11568  [(set (match_operand:SI 0 "s_register_operand" "")
11569	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11570			  [(match_operand 2 "" "") (match_operand 3 "" "")])
11571			 (match_operand 4 "" "")
11572			 (match_dup 0)))
11573   (clobber (reg:CC CC_REGNUM))]
11574  "TARGET_ARM && reload_completed"
11575  [(set (match_dup 5) (match_dup 6))
11576   (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11577	      (set (match_dup 0) (match_dup 4)))]
11578  "
11579  {
11580    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11581					     operands[2], operands[3]);
11582
11583    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11584    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11585  }"
11586)
11587
11588(define_split
11589  [(set (match_operand:SI 0 "s_register_operand" "")
11590	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11591			  [(match_operand 2 "" "") (match_operand 3 "" "")])
11592			 (match_operand 4 "" "")
11593			 (match_operand 5 "" "")))
11594   (clobber (reg:CC CC_REGNUM))]
11595  "TARGET_ARM && reload_completed"
11596  [(set (match_dup 6) (match_dup 7))
11597   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11598	      (set (match_dup 0) (match_dup 4)))
11599   (cond_exec (match_dup 8)
11600	      (set (match_dup 0) (match_dup 5)))]
11601  "
11602  {
11603    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11604					     operands[2], operands[3]);
11605    enum rtx_code rc = GET_CODE (operands[1]);
11606
11607    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11608    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11609    if (mode == CCFPmode || mode == CCFPEmode)
11610      rc = reverse_condition_maybe_unordered (rc);
11611    else
11612      rc = reverse_condition (rc);
11613
11614    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11615  }"
11616)
11617
11618(define_split
11619  [(set (match_operand:SI 0 "s_register_operand" "")
11620	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11621			  [(match_operand:SI 2 "s_register_operand" "")
11622			   (match_operand:SI 3 "arm_add_operand" "")])
11623			 (match_operand:SI 4 "arm_rhs_operand" "")
11624			 (not:SI
11625			  (match_operand:SI 5 "s_register_operand" ""))))
11626   (clobber (reg:CC CC_REGNUM))]
11627  "TARGET_ARM && reload_completed"
11628  [(set (match_dup 6) (match_dup 7))
11629   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11630	      (set (match_dup 0) (match_dup 4)))
11631   (cond_exec (match_dup 8)
11632	      (set (match_dup 0) (not:SI (match_dup 5))))]
11633  "
11634  {
11635    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11636					     operands[2], operands[3]);
11637    enum rtx_code rc = GET_CODE (operands[1]);
11638
11639    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11640    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11641    if (mode == CCFPmode || mode == CCFPEmode)
11642      rc = reverse_condition_maybe_unordered (rc);
11643    else
11644      rc = reverse_condition (rc);
11645
11646    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11647  }"
11648)
11649
11650(define_insn "*cond_move_not"
11651  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11652	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
11653			  [(match_operand 3 "cc_register" "") (const_int 0)])
11654			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11655			 (not:SI
11656			  (match_operand:SI 2 "s_register_operand" "r,r"))))]
11657  "TARGET_ARM"
11658  "@
11659   mvn%D4\\t%0, %2
11660   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11661  [(set_attr "conds" "use")
11662   (set_attr "type" "mvn_reg,multiple")
11663   (set_attr "length" "4,8")]
11664)
11665
;; The next two patterns occur when an AND operation is followed by a
;; scc insn sequence.
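;; For illustration, the first pattern below emits roughly
;;   ands  %0, %1, #(1 << bitnum)
;;   mvnne %0, #0
;; so that %0 ends up as -1 if the selected bit is set and 0 otherwise.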
11668
11669(define_insn "*sign_extract_onebit"
11670  [(set (match_operand:SI 0 "s_register_operand" "=r")
11671	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11672			 (const_int 1)
11673			 (match_operand:SI 2 "const_int_operand" "n")))
11674    (clobber (reg:CC CC_REGNUM))]
11675  "TARGET_ARM"
11676  "*
11677    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11678    output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11679    return \"mvnne\\t%0, #0\";
11680  "
11681  [(set_attr "conds" "clob")
11682   (set_attr "length" "8")
11683   (set_attr "type" "multiple")]
11684)
11685
11686(define_insn "*not_signextract_onebit"
11687  [(set (match_operand:SI 0 "s_register_operand" "=r")
11688	(not:SI
11689	 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11690			  (const_int 1)
11691			  (match_operand:SI 2 "const_int_operand" "n"))))
11692   (clobber (reg:CC CC_REGNUM))]
11693  "TARGET_ARM"
11694  "*
11695    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11696    output_asm_insn (\"tst\\t%1, %2\", operands);
11697    output_asm_insn (\"mvneq\\t%0, #0\", operands);
11698    return \"movne\\t%0, #0\";
11699  "
11700  [(set_attr "conds" "clob")
11701   (set_attr "length" "12")
11702   (set_attr "type" "multiple")]
11703)
11704;; ??? The above patterns need auditing for Thumb-2
11705
;; Push multiple registers to the stack.  Registers are in parallel (use ...)
;; expressions.  For simplicity, the first register is also in the unspec
;; part.
;; To avoid relying on a GNU extension, the length attribute is computed
;; in the C function arm_attr_length_push_multi.
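;; For example, saving r4, r5 and lr emits roughly
;;   push {r4, r5, lr}
;; while a single register in ARM state is stored with
;;   str rN, [sp, #-4]!
;; as explained in the output template below.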
11711(define_insn "*push_multi"
11712  [(match_parallel 2 "multi_register_push"
11713    [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11714	  (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11715		      UNSPEC_PUSH_MULT))])]
11716  ""
11717  "*
11718  {
11719    int num_saves = XVECLEN (operands[2], 0);
11720
11721    /* For the StrongARM at least it is faster to
11722       use STR to store only a single register.
11723       In Thumb mode always use push, and the assembler will pick
11724       something appropriate.  */
11725    if (num_saves == 1 && TARGET_ARM)
11726      output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11727    else
11728      {
11729	int i;
11730	char pattern[100];
11731
11732	if (TARGET_32BIT)
11733	    strcpy (pattern, \"push%?\\t{%1\");
11734	else
11735	    strcpy (pattern, \"push\\t{%1\");
11736
11737	for (i = 1; i < num_saves; i++)
11738	  {
11739	    strcat (pattern, \", %|\");
11740	    strcat (pattern,
11741		    reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11742	  }
11743
11744	strcat (pattern, \"}\");
11745	output_asm_insn (pattern, operands);
11746      }
11747
11748    return \"\";
11749  }"
11750  [(set_attr "type" "store_16")
11751   (set (attr "length")
11752	(symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11753)
11754
11755(define_insn "stack_tie"
11756  [(set (mem:BLK (scratch))
11757	(unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11758		     (match_operand:SI 1 "s_register_operand" "rk")]
11759		    UNSPEC_PRLG_STK))]
11760  ""
11761  ""
11762  [(set_attr "length" "0")
11763   (set_attr "type" "block")]
11764)
11765
11766;; Pop (as used in epilogue RTL)
11767;;
11768(define_insn "*load_multiple_with_writeback"
11769  [(match_parallel 0 "load_multiple_operation"
11770    [(set (match_operand:SI 1 "s_register_operand" "+rk")
11771          (plus:SI (match_dup 1)
11772                   (match_operand:SI 2 "const_int_I_operand" "I")))
11773     (set (match_operand:SI 3 "s_register_operand" "=rk")
11774          (mem:SI (match_dup 1)))
11775        ])]
11776  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11777  "*
11778  {
11779    arm_output_multireg_pop (operands, /*return_pc=*/false,
11780                                       /*cond=*/const_true_rtx,
11781                                       /*reverse=*/false,
11782                                       /*update=*/true);
11783    return \"\";
11784  }
11785  "
11786  [(set_attr "type" "load_16")
11787   (set_attr "predicable" "yes")
11788   (set (attr "length")
11789	(symbol_ref "arm_attr_length_pop_multi (operands,
11790						/*return_pc=*/false,
11791						/*write_back_p=*/true)"))]
11792)
11793
;; Pop with return (as used in epilogue RTL)
;;
;; This instruction is generated when the registers are popped at the end of
;; the epilogue.  Here, instead of popping the value into LR and then
;; generating a jump to LR, the value is popped directly into PC.  Hence, the
;; pattern is combined with (return).
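;; For example, an epilogue restoring r4-r7 before returning emits roughly
;;   pop {r4, r5, r6, r7, pc}
;; (the exact form is chosen by arm_output_multireg_pop).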
11800(define_insn "*pop_multiple_with_writeback_and_return"
11801  [(match_parallel 0 "pop_multiple_return"
11802    [(return)
11803     (set (match_operand:SI 1 "s_register_operand" "+rk")
11804          (plus:SI (match_dup 1)
11805                   (match_operand:SI 2 "const_int_I_operand" "I")))
11806     (set (match_operand:SI 3 "s_register_operand" "=rk")
11807          (mem:SI (match_dup 1)))
11808        ])]
11809  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11810  "*
11811  {
11812    arm_output_multireg_pop (operands, /*return_pc=*/true,
11813                                       /*cond=*/const_true_rtx,
11814                                       /*reverse=*/false,
11815                                       /*update=*/true);
11816    return \"\";
11817  }
11818  "
11819  [(set_attr "type" "load_16")
11820   (set_attr "predicable" "yes")
11821   (set (attr "length")
11822	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11823						/*write_back_p=*/true)"))]
11824)
11825
11826(define_insn "*pop_multiple_with_return"
11827  [(match_parallel 0 "pop_multiple_return"
11828    [(return)
11829     (set (match_operand:SI 2 "s_register_operand" "=rk")
11830          (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11831        ])]
11832  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11833  "*
11834  {
11835    arm_output_multireg_pop (operands, /*return_pc=*/true,
11836                                       /*cond=*/const_true_rtx,
11837                                       /*reverse=*/false,
11838                                       /*update=*/false);
11839    return \"\";
11840  }
11841  "
11842  [(set_attr "type" "load_16")
11843   (set_attr "predicable" "yes")
11844   (set (attr "length")
11845	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11846						/*write_back_p=*/false)"))]
11847)
11848
11849;; Load into PC and return
11850(define_insn "*ldr_with_return"
11851  [(return)
11852   (set (reg:SI PC_REGNUM)
11853        (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11854  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11855  "ldr%?\t%|pc, [%0], #4"
11856  [(set_attr "type" "load_4")
11857   (set_attr "predicable" "yes")]
11858)
11859;; Pop for floating point registers (as used in epilogue RTL)
11860(define_insn "*vfp_pop_multiple_with_writeback"
11861  [(match_parallel 0 "pop_multiple_fp"
11862    [(set (match_operand:SI 1 "s_register_operand" "+rk")
11863          (plus:SI (match_dup 1)
11864                   (match_operand:SI 2 "const_int_I_operand" "I")))
11865     (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11866          (mem:DF (match_dup 1)))])]
11867  "TARGET_32BIT && TARGET_VFP_BASE"
11868  "*
11869  {
11870    int num_regs = XVECLEN (operands[0], 0);
11871    char pattern[100];
11872    rtx op_list[2];
11873    strcpy (pattern, \"vldm\\t\");
11874    strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11875    strcat (pattern, \"!, {\");
11876    op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11877    strcat (pattern, \"%P0\");
11878    if ((num_regs - 1) > 1)
11879      {
11880        strcat (pattern, \"-%P1\");
        op_list[1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11882      }
11883
11884    strcat (pattern, \"}\");
11885    output_asm_insn (pattern, op_list);
11886    return \"\";
11887  }
11888  "
11889  [(set_attr "type" "load_16")
11890   (set_attr "conds" "unconditional")
11891   (set_attr "predicable" "no")]
11892)
11893
11894;; Special patterns for dealing with the constant pool
11895
11896(define_insn "align_4"
11897  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11898  "TARGET_EITHER"
11899  "*
11900  assemble_align (32);
11901  return \"\";
11902  "
11903  [(set_attr "type" "no_insn")]
11904)
11905
11906(define_insn "align_8"
11907  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11908  "TARGET_EITHER"
11909  "*
11910  assemble_align (64);
11911  return \"\";
11912  "
11913  [(set_attr "type" "no_insn")]
11914)
11915
11916(define_insn "consttable_end"
11917  [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11918  "TARGET_EITHER"
11919  "*
11920  making_const_table = FALSE;
11921  return \"\";
11922  "
11923  [(set_attr "type" "no_insn")]
11924)
11925
11926(define_insn "consttable_1"
11927  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11928  "TARGET_EITHER"
11929  "*
11930  making_const_table = TRUE;
11931  assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11932  assemble_zeros (3);
11933  return \"\";
11934  "
11935  [(set_attr "length" "4")
11936   (set_attr "type" "no_insn")]
11937)
11938
11939(define_insn "consttable_2"
11940  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11941  "TARGET_EITHER"
11942  "*
11943  {
11944    rtx x = operands[0];
11945    making_const_table = TRUE;
11946    switch (GET_MODE_CLASS (GET_MODE (x)))
11947      {
11948      case MODE_FLOAT:
11949	arm_emit_fp16_const (x);
11950	break;
11951      default:
11952	assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11953	assemble_zeros (2);
11954	break;
11955      }
11956    return \"\";
11957  }"
11958  [(set_attr "length" "4")
11959   (set_attr "type" "no_insn")]
11960)
11961
11962(define_insn "consttable_4"
11963  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11964  "TARGET_EITHER"
11965  "*
11966  {
11967    rtx x = operands[0];
11968    making_const_table = TRUE;
11969    scalar_float_mode float_mode;
11970    if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11971      assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11972    else
11973      {
11974	/* XXX: Sometimes gcc does something really dumb and ends up with
11975	   a HIGH in a constant pool entry, usually because it's trying to
11976	   load into a VFP register.  We know this will always be used in
11977	   combination with a LO_SUM which ignores the high bits, so just
11978	   strip off the HIGH.  */
11979	if (GET_CODE (x) == HIGH)
11980	  x = XEXP (x, 0);
11981        assemble_integer (x, 4, BITS_PER_WORD, 1);
11982	mark_symbol_refs_as_used (x);
11983      }
11984    return \"\";
11985  }"
11986  [(set_attr "length" "4")
11987   (set_attr "type" "no_insn")]
11988)
11989
11990(define_insn "consttable_8"
11991  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11992  "TARGET_EITHER"
11993  "*
11994  {
11995    making_const_table = TRUE;
11996    scalar_float_mode float_mode;
11997    if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11998      assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11999		     float_mode, BITS_PER_WORD);
12000    else
12001      assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
12002    return \"\";
12003  }"
12004  [(set_attr "length" "8")
12005   (set_attr "type" "no_insn")]
12006)
12007
12008(define_insn "consttable_16"
12009  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
12010  "TARGET_EITHER"
12011  "*
12012  {
12013    making_const_table = TRUE;
12014    scalar_float_mode float_mode;
12015    if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12016      assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12017		     float_mode, BITS_PER_WORD);
12018    else
12019      assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12020    return \"\";
12021  }"
12022  [(set_attr "length" "16")
12023   (set_attr "type" "no_insn")]
12024)
12025
;; V5 instructions.
12027
12028(define_insn "clzsi2"
12029  [(set (match_operand:SI 0 "s_register_operand" "=r")
12030	(clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12031  "TARGET_32BIT && arm_arch5t"
12032  "clz%?\\t%0, %1"
12033  [(set_attr "predicable" "yes")
12034   (set_attr "type" "clz")])
12035
12036(define_insn "rbitsi2"
12037  [(set (match_operand:SI 0 "s_register_operand" "=r")
12038	(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12039  "TARGET_32BIT && arm_arch_thumb2"
12040  "rbit%?\\t%0, %1"
12041  [(set_attr "predicable" "yes")
12042   (set_attr "type" "clz")])
12043
12044;; Keep this as a CTZ expression until after reload and then split
12045;; into RBIT + CLZ.  Since RBIT is represented as an UNSPEC it is unlikely
12046;; to fold with any other expression.
12047
12048(define_insn_and_split "ctzsi2"
12049 [(set (match_operand:SI           0 "s_register_operand" "=r")
12050       (ctz:SI (match_operand:SI  1 "s_register_operand" "r")))]
12051  "TARGET_32BIT && arm_arch_thumb2"
12052  "#"
12053  "&& reload_completed"
12054  [(const_int 0)]
12055  "
12056  emit_insn (gen_rbitsi2 (operands[0], operands[1]));
12057  emit_insn (gen_clzsi2 (operands[0], operands[0]));
12058  DONE;
12059")
12060
12061;; V5E instructions.
12062
12063(define_insn "prefetch"
12064  [(prefetch (match_operand:SI 0 "address_operand" "p")
12065	     (match_operand:SI 1 "" "")
12066	     (match_operand:SI 2 "" ""))]
12067  "TARGET_32BIT && arm_arch5te"
12068  "pld\\t%a0"
12069  [(set_attr "type" "load_4")]
12070)
12071
12072;; General predication pattern
12073
12074(define_cond_exec
12075  [(match_operator 0 "arm_comparison_operator"
12076    [(match_operand 1 "cc_register" "")
12077     (const_int 0)])]
12078  "TARGET_32BIT
12079   && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12080  ""
12081[(set_attr "predicated" "yes")]
12082)
12083
12084(define_insn "force_register_use"
12085  [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12086  ""
12087  "%@ %0 needed"
12088  [(set_attr "length" "0")
12089   (set_attr "type" "no_insn")]
12090)
12091
12092
12093;; Patterns for exception handling
12094
12095(define_expand "eh_return"
12096  [(use (match_operand 0 "general_operand"))]
12097  "TARGET_EITHER"
12098  "
12099  {
12100    if (TARGET_32BIT)
12101      emit_insn (gen_arm_eh_return (operands[0]));
12102    else
12103      emit_insn (gen_thumb_eh_return (operands[0]));
12104    DONE;
12105  }"
12106)
12107
12108;; We can't expand this before we know where the link register is stored.
12109(define_insn_and_split "arm_eh_return"
12110  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12111		    VUNSPEC_EH_RETURN)
12112   (clobber (match_scratch:SI 1 "=&r"))]
12113  "TARGET_ARM"
12114  "#"
12115  "&& reload_completed"
12116  [(const_int 0)]
12117  "
12118  {
12119    arm_set_return_address (operands[0], operands[1]);
12120    DONE;
12121  }"
12122)
12123
12124
12125;; TLS support
12126
12127(define_insn "load_tp_hard"
12128  [(set (match_operand:SI 0 "register_operand" "=r")
12129	(unspec:SI [(const_int 0)] UNSPEC_TLS))]
12130  "TARGET_HARD_TP"
12131  "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12132  [(set_attr "predicable" "yes")
12133   (set_attr "type" "mrs")]
12134)
12135
12136;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
12137(define_insn "load_tp_soft_fdpic"
12138  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12139   (clobber (reg:SI FDPIC_REGNUM))
12140   (clobber (reg:SI LR_REGNUM))
12141   (clobber (reg:SI IP_REGNUM))
12142   (clobber (reg:CC CC_REGNUM))]
12143  "TARGET_SOFT_TP && TARGET_FDPIC"
12144  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12145  [(set_attr "conds" "clob")
12146   (set_attr "type" "branch")]
12147)
12148
12149;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
12150(define_insn "load_tp_soft"
12151  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12152   (clobber (reg:SI LR_REGNUM))
12153   (clobber (reg:SI IP_REGNUM))
12154   (clobber (reg:CC CC_REGNUM))]
12155  "TARGET_SOFT_TP && !TARGET_FDPIC"
12156  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12157  [(set_attr "conds" "clob")
12158   (set_attr "type" "branch")]
12159)
12160
12161;; tls descriptor call
12162(define_insn "tlscall"
12163  [(set (reg:SI R0_REGNUM)
12164        (unspec:SI [(reg:SI R0_REGNUM)
12165                    (match_operand:SI 0 "" "X")
12166	            (match_operand 1 "" "")] UNSPEC_TLS))
12167   (clobber (reg:SI R1_REGNUM))
12168   (clobber (reg:SI LR_REGNUM))
12169   (clobber (reg:SI CC_REGNUM))]
12170  "TARGET_GNU2_TLS"
12171  {
12172    targetm.asm_out.internal_label (asm_out_file, "LPIC",
12173				    INTVAL (operands[1]));
12174    return "bl\\t%c0(tlscall)";
12175  }
12176  [(set_attr "conds" "clob")
12177   (set_attr "length" "4")
12178   (set_attr "type" "branch")]
12179)
12180
12181;; For thread pointer builtin
12182(define_expand "get_thread_pointersi"
12183  [(match_operand:SI 0 "s_register_operand")]
12184 ""
12185 "
12186 {
12187   arm_load_tp (operands[0]);
12188   DONE;
12189 }")
12190
12191;;
12192
12193;; We only care about the lower 16 bits of the constant
12194;; being inserted into the upper 16 bits of the register.
12195(define_insn "*arm_movtas_ze"
12196  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12197                   (const_int 16)
12198                   (const_int 16))
12199        (match_operand:SI 1 "const_int_operand" ""))]
12200  "TARGET_HAVE_MOVT"
12201  "@
12202   movt%?\t%0, %L1
12203   movt\t%0, %L1"
12204 [(set_attr "arch" "32,v8mb")
12205  (set_attr "predicable" "yes")
12206  (set_attr "length" "4")
12207  (set_attr "type" "alu_sreg")]
12208)
12209
12210(define_insn "*arm_rev"
12211  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12212	(bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12213  "arm_arch6"
12214  "@
12215   rev\t%0, %1
12216   rev%?\t%0, %1
12217   rev%?\t%0, %1"
12218  [(set_attr "arch" "t1,t2,32")
12219   (set_attr "length" "2,2,4")
12220   (set_attr "predicable" "no,yes,yes")
12221   (set_attr "type" "rev")]
12222)
12223
12224(define_expand "arm_legacy_rev"
12225  [(set (match_operand:SI 2 "s_register_operand")
12226	(xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12227			     (const_int 16))
12228		(match_dup 1)))
12229   (set (match_dup 2)
12230	(lshiftrt:SI (match_dup 2)
12231		     (const_int 8)))
12232   (set (match_operand:SI 3 "s_register_operand")
12233	(rotatert:SI (match_dup 1)
12234		     (const_int 8)))
12235   (set (match_dup 2)
12236	(and:SI (match_dup 2)
12237		(const_int -65281)))
12238   (set (match_operand:SI 0 "s_register_operand")
12239	(xor:SI (match_dup 3)
12240		(match_dup 2)))]
12241  "TARGET_32BIT"
12242  ""
12243)
12244
12245;; Reuse temporaries to keep register pressure down.
12246(define_expand "thumb_legacy_rev"
12247  [(set (match_operand:SI 2 "s_register_operand")
12248     (ashift:SI (match_operand:SI 1 "s_register_operand")
12249                (const_int 24)))
12250   (set (match_operand:SI 3 "s_register_operand")
12251     (lshiftrt:SI (match_dup 1)
12252		  (const_int 24)))
12253   (set (match_dup 3)
12254     (ior:SI (match_dup 3)
12255	     (match_dup 2)))
12256   (set (match_operand:SI 4 "s_register_operand")
12257     (const_int 16))
12258   (set (match_operand:SI 5 "s_register_operand")
12259     (rotatert:SI (match_dup 1)
12260		  (match_dup 4)))
12261   (set (match_dup 2)
12262     (ashift:SI (match_dup 5)
12263                (const_int 24)))
12264   (set (match_dup 5)
12265     (lshiftrt:SI (match_dup 5)
12266		  (const_int 24)))
12267   (set (match_dup 5)
12268     (ior:SI (match_dup 5)
12269	     (match_dup 2)))
12270   (set (match_dup 5)
12271     (rotatert:SI (match_dup 5)
12272		  (match_dup 4)))
12273   (set (match_operand:SI 0 "s_register_operand")
12274     (ior:SI (match_dup 5)
12275             (match_dup 3)))]
12276  "TARGET_THUMB"
12277  ""
12278)
12279
12280;; ARM-specific expansion of signed mod by power of 2
12281;; using conditional negate.
12282;; For r0 % n where n is a power of 2 produce:
12283;; rsbs    r1, r0, #0
12284;; and     r0, r0, #(n - 1)
12285;; and     r1, r1, #(n - 1)
12286;; rsbpl   r0, r1, #0
12287
12288(define_expand "modsi3"
12289  [(match_operand:SI 0 "register_operand")
12290   (match_operand:SI 1 "register_operand")
12291   (match_operand:SI 2 "const_int_operand")]
12292  "TARGET_32BIT"
12293  {
12294    HOST_WIDE_INT val = INTVAL (operands[2]);
12295
12296    if (val <= 0
12297       || exact_log2 (val) <= 0)
12298      FAIL;
12299
12300    rtx mask = GEN_INT (val - 1);
12301
    /* In the special case of r0 % 2 we can do the even shorter:
12303	cmp     r0, #0
12304	and     r0, r0, #1
12305	rsblt   r0, r0, #0.  */
12306
12307    if (val == 2)
12308      {
12309	rtx cc_reg = arm_gen_compare_reg (LT,
12310					  operands[1], const0_rtx, NULL_RTX);
12311	rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12312	rtx masked = gen_reg_rtx (SImode);
12313
12314	emit_insn (gen_andsi3 (masked, operands[1], mask));
12315	emit_move_insn (operands[0],
12316			gen_rtx_IF_THEN_ELSE (SImode, cond,
12317					      gen_rtx_NEG (SImode,
12318							   masked),
12319					      masked));
12320	DONE;
12321      }
12322
12323    rtx neg_op = gen_reg_rtx (SImode);
12324    rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12325						      operands[1]));
12326
12327    /* Extract the condition register and mode.  */
12328    rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12329    rtx cc_reg = SET_DEST (cmp);
12330    rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12331
12332    emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12333
12334    rtx masked_neg = gen_reg_rtx (SImode);
12335    emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12336
12337    /* We want a conditional negate here, but emitting COND_EXEC rtxes
12338       during expand does not always work.  Do an IF_THEN_ELSE instead.  */
12339    emit_move_insn (operands[0],
12340		    gen_rtx_IF_THEN_ELSE (SImode, cond,
12341					  gen_rtx_NEG (SImode, masked_neg),
12342					  operands[0]));
12343
12344
12345    DONE;
12346  }
12347)
12348
12349(define_expand "bswapsi2"
12350  [(set (match_operand:SI 0 "s_register_operand")
12351	(bswap:SI (match_operand:SI 1 "s_register_operand")))]
12352"TARGET_EITHER && (arm_arch6 || !optimize_size)"
12353"
12354    if (!arm_arch6)
12355      {
12356	rtx op2 = gen_reg_rtx (SImode);
12357	rtx op3 = gen_reg_rtx (SImode);
12358
12359	if (TARGET_THUMB)
12360	  {
12361	    rtx op4 = gen_reg_rtx (SImode);
12362	    rtx op5 = gen_reg_rtx (SImode);
12363
12364	    emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12365					     op2, op3, op4, op5));
12366	  }
12367	else
12368	  {
12369	    emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12370					   op2, op3));
12371	  }
12372
12373	DONE;
12374      }
12375  "
12376)
12377
12378;; bswap16 patterns: use revsh and rev16 instructions for the signed
12379;; and unsigned variants, respectively. For rev16, expose
12380;; byte-swapping in the lower 16 bits only.
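;; For example, revsh r0, r1 byte-swaps the low halfword of r1 and
;; sign-extends the result into r0, while rev16 r0, r1 swaps the bytes
;; within each halfword of r1.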
12381(define_insn "*arm_revsh"
12382  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12383	(sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12384  "arm_arch6"
12385  "@
12386  revsh\t%0, %1
12387  revsh%?\t%0, %1
12388  revsh%?\t%0, %1"
12389  [(set_attr "arch" "t1,t2,32")
12390   (set_attr "length" "2,2,4")
12391   (set_attr "type" "rev")]
12392)
12393
12394(define_insn "*arm_rev16"
12395  [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12396	(bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12397  "arm_arch6"
12398  "@
12399   rev16\t%0, %1
12400   rev16%?\t%0, %1
12401   rev16%?\t%0, %1"
12402  [(set_attr "arch" "t1,t2,32")
12403   (set_attr "length" "2,2,4")
12404   (set_attr "type" "rev")]
12405)
12406
;; There are no canonicalisation rules for the position of the lshiftrt and
;; ashift operations within an IOR/AND RTX, so we provide one pattern for
;; each valid permutation.
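;; For illustration, with SImode masks of 0xff00ff00 (on the left-shifted
;; value) and 0x00ff00ff (on the right-shifted value) the expression
;;   ((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)
;; swaps the bytes within each halfword and is matched as rev16.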
12410
12411(define_insn "arm_rev16si2"
12412  [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12413        (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12414                                   (const_int 8))
12415                        (match_operand:SI 3 "const_int_operand" "n,n,n"))
12416                (and:SI (lshiftrt:SI (match_dup 1)
12417                                     (const_int 8))
12418                        (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12419  "arm_arch6
12420   && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12421   && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12422  "rev16\\t%0, %1"
12423  [(set_attr "arch" "t1,t2,32")
12424   (set_attr "length" "2,2,4")
12425   (set_attr "type" "rev")]
12426)
12427
12428(define_insn "arm_rev16si2_alt"
12429  [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12430        (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12431                                     (const_int 8))
12432                        (match_operand:SI 2 "const_int_operand" "n,n,n"))
12433                (and:SI (ashift:SI (match_dup 1)
12434                                   (const_int 8))
12435                        (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12436  "arm_arch6
12437   && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12438   && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12439  "rev16\\t%0, %1"
12440  [(set_attr "arch" "t1,t2,32")
12441   (set_attr "length" "2,2,4")
12442   (set_attr "type" "rev")]
12443)
12444
12445(define_expand "bswaphi2"
12446  [(set (match_operand:HI 0 "s_register_operand")
12447	(bswap:HI (match_operand:HI 1 "s_register_operand")))]
12448"arm_arch6"
12449""
12450)
12451
12452;; Patterns for LDRD/STRD in Thumb2 mode
12453
12454(define_insn "*thumb2_ldrd"
12455  [(set (match_operand:SI 0 "s_register_operand" "=r")
12456        (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12457                         (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12458   (set (match_operand:SI 3 "s_register_operand" "=r")
12459        (mem:SI (plus:SI (match_dup 1)
12460                         (match_operand:SI 4 "const_int_operand" ""))))]
12461  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12462     && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12463     && (operands_ok_ldrd_strd (operands[0], operands[3],
12464                                  operands[1], INTVAL (operands[2]),
12465                                  false, true))"
12466  "ldrd%?\t%0, %3, [%1, %2]"
12467  [(set_attr "type" "load_8")
12468   (set_attr "predicable" "yes")])
12469
12470(define_insn "*thumb2_ldrd_base"
12471  [(set (match_operand:SI 0 "s_register_operand" "=r")
12472        (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12473   (set (match_operand:SI 2 "s_register_operand" "=r")
12474        (mem:SI (plus:SI (match_dup 1)
12475                         (const_int 4))))]
12476  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12477     && (operands_ok_ldrd_strd (operands[0], operands[2],
12478                                  operands[1], 0, false, true))"
12479  "ldrd%?\t%0, %2, [%1]"
12480  [(set_attr "type" "load_8")
12481   (set_attr "predicable" "yes")])
12482
12483(define_insn "*thumb2_ldrd_base_neg"
12484  [(set (match_operand:SI 0 "s_register_operand" "=r")
12485	(mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12486                         (const_int -4))))
12487   (set (match_operand:SI 2 "s_register_operand" "=r")
12488        (mem:SI (match_dup 1)))]
12489  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12490     && (operands_ok_ldrd_strd (operands[0], operands[2],
12491                                  operands[1], -4, false, true))"
12492  "ldrd%?\t%0, %2, [%1, #-4]"
12493  [(set_attr "type" "load_8")
12494   (set_attr "predicable" "yes")])
12495
12496(define_insn "*thumb2_strd"
12497  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12498                         (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12499        (match_operand:SI 2 "s_register_operand" "r"))
12500   (set (mem:SI (plus:SI (match_dup 0)
12501                         (match_operand:SI 3 "const_int_operand" "")))
12502        (match_operand:SI 4 "s_register_operand" "r"))]
12503  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12504     && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12505     && (operands_ok_ldrd_strd (operands[2], operands[4],
12506                                  operands[0], INTVAL (operands[1]),
12507                                  false, false))"
12508  "strd%?\t%2, %4, [%0, %1]"
12509  [(set_attr "type" "store_8")
12510   (set_attr "predicable" "yes")])
12511
12512(define_insn "*thumb2_strd_base"
12513  [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12514        (match_operand:SI 1 "s_register_operand" "r"))
12515   (set (mem:SI (plus:SI (match_dup 0)
12516                         (const_int 4)))
12517        (match_operand:SI 2 "s_register_operand" "r"))]
12518  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12519     && (operands_ok_ldrd_strd (operands[1], operands[2],
12520                                  operands[0], 0, false, false))"
12521  "strd%?\t%1, %2, [%0]"
12522  [(set_attr "type" "store_8")
12523   (set_attr "predicable" "yes")])
12524
12525(define_insn "*thumb2_strd_base_neg"
12526  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12527                         (const_int -4)))
12528        (match_operand:SI 1 "s_register_operand" "r"))
12529   (set (mem:SI (match_dup 0))
12530        (match_operand:SI 2 "s_register_operand" "r"))]
12531  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12532     && (operands_ok_ldrd_strd (operands[1], operands[2],
12533                                  operands[0], -4, false, false))"
12534  "strd%?\t%1, %2, [%0, #-4]"
12535  [(set_attr "type" "store_8")
12536   (set_attr "predicable" "yes")])
12537
12538;; ARMv8 CRC32 instructions.
12539(define_insn "arm_<crc_variant>"
12540  [(set (match_operand:SI 0 "s_register_operand" "=r")
12541        (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12542                    (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12543         CRC))]
12544  "TARGET_CRC32"
12545  "<crc_variant>\\t%0, %1, %2"
12546  [(set_attr "type" "crc")
12547   (set_attr "conds" "unconditional")]
12548)
12549
12550;; Load the load/store double peephole optimizations.
12551(include "ldrdstrd.md")
12552
;; Load the load/store multiple patterns.
12554(include "ldmstm.md")
12555
;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern
;; covers large lists without explicit writeback, as generated for the
;; APCS_FRAME epilogue.  The operands are validated through the
;; load_multiple_operation match_parallel predicate rather than through
;; constraints, so enable it only after reload.
12561(define_insn "*load_multiple"
12562  [(match_parallel 0 "load_multiple_operation"
12563    [(set (match_operand:SI 2 "s_register_operand" "=rk")
12564          (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12565        ])]
12566  "TARGET_32BIT && reload_completed"
12567  "*
12568  {
12569    arm_output_multireg_pop (operands, /*return_pc=*/false,
12570                                       /*cond=*/const_true_rtx,
12571                                       /*reverse=*/false,
12572                                       /*update=*/false);
12573    return \"\";
12574  }
12575  "
12576  [(set_attr "predicable" "yes")]
12577)
12578
12579(define_expand "copysignsf3"
12580  [(match_operand:SF 0 "register_operand")
12581   (match_operand:SF 1 "register_operand")
12582   (match_operand:SF 2 "register_operand")]
12583  "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12584  "{
12585     emit_move_insn (operands[0], operands[2]);
12586     emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12587		GEN_INT (31), GEN_INT (0),
12588		simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
12589     DONE;
12590  }"
12591)
12592
12593(define_expand "copysigndf3"
12594  [(match_operand:DF 0 "register_operand")
12595   (match_operand:DF 1 "register_operand")
12596   (match_operand:DF 2 "register_operand")]
12597  "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12598  "{
12599     rtx op0_low = gen_lowpart (SImode, operands[0]);
12600     rtx op0_high = gen_highpart (SImode, operands[0]);
12601     rtx op1_low = gen_lowpart (SImode, operands[1]);
12602     rtx op1_high = gen_highpart (SImode, operands[1]);
12603     rtx op2_high = gen_highpart (SImode, operands[2]);
12604
12605     rtx scratch1 = gen_reg_rtx (SImode);
12606     rtx scratch2 = gen_reg_rtx (SImode);
12607     emit_move_insn (scratch1, op2_high);
12608     emit_move_insn (scratch2, op1_high);
12609
     emit_insn (gen_rtx_SET (scratch1,
			     gen_rtx_LSHIFTRT (SImode, op2_high,
					       GEN_INT (31))));
     emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
12613     emit_move_insn (op0_low, op1_low);
12614     emit_move_insn (op0_high, scratch2);
12615
12616     DONE;
12617  }"
12618)
12619
12620;; movmisalign for DImode
12621(define_expand "movmisaligndi"
12622  [(match_operand:DI 0 "general_operand")
12623   (match_operand:DI 1 "general_operand")]
12624  "unaligned_access"
12625{
  rtx lo_op0 = gen_lowpart (SImode, operands[0]);
  rtx lo_op1 = gen_lowpart (SImode, operands[1]);
  rtx hi_op0 = gen_highpart_mode (SImode, DImode, operands[0]);
  rtx hi_op1 = gen_highpart_mode (SImode, DImode, operands[1]);

  emit_insn (gen_movmisalignsi (lo_op0, lo_op1));
  emit_insn (gen_movmisalignsi (hi_op0, hi_op1));
  DONE;
})

;; movmisalign patterns for HImode and SImode.
(define_expand "movmisalign<mode>"
  [(match_operand:HSI 0 "general_operand")
   (match_operand:HSI 1 "general_operand")]
  "unaligned_access"
{
  /* This pattern is not permitted to fail during expansion: if both arguments
     are non-registers (e.g. memory := constant), force operand 1 into a
     register.  */
  rtx (*gen_unaligned_load) (rtx, rtx);
  rtx tmp_dest = operands[0];
  if (!s_register_operand (operands[0], <MODE>mode)
      && !s_register_operand (operands[1], <MODE>mode))
    operands[1] = force_reg (<MODE>mode, operands[1]);

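  /* HImode accesses are loaded with a zero-extending unaligned load into a
     full SImode temporary; the low half is then copied to the destination.  */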
  if (<MODE>mode == HImode)
    {
      gen_unaligned_load = gen_unaligned_loadhiu;
      tmp_dest = gen_reg_rtx (SImode);
    }
  else
    gen_unaligned_load = gen_unaligned_loadsi;

  if (MEM_P (operands[1]))
    {
      emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
      if (<MODE>mode == HImode)
	emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
    }
  else
    emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));

  DONE;
})

(define_insn "arm_<cdp>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "immediate_operand" "n")
		     (match_operand:SI 3 "immediate_operand" "n")
		     (match_operand:SI 4 "immediate_operand" "n")
		     (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
  "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, 16);
  arm_const_bounds (operands[2], 0, (1 << 5));
  arm_const_bounds (operands[3], 0, (1 << 5));
  arm_const_bounds (operands[4], 0, (1 << 5));
  arm_const_bounds (operands[5], 0, 8);
  return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])
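
;; For illustration only (assuming the ACLE coprocessor intrinsics from
;; <arm_acle.h> are available on the target): a call such as
;;   __arm_cdp (1, 2, 3, 4, 5, 6);
;; would be expected to expand through the arm_<cdp> pattern above to
;; something like
;;   cdp	p1, 2, CR3, CR4, CR5, 6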

(define_insn "*ldc"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
  "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, (1 << 5));
  return "<ldc>\\tp%c0, CR%c1, %2";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "*stc"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
  "arm_coproc_builtin_available (VUNSPEC_<STC>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, (1 << 5));
  return "<stc>\\tp%c0, CR%c1, %2";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_expand "arm_<ldc>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
		     (match_operand:SI 1 "immediate_operand")
		     (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
  "arm_coproc_builtin_available (VUNSPEC_<LDC>)")

(define_expand "arm_<stc>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
		     (match_operand:SI 1 "immediate_operand")
		     (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
  "arm_coproc_builtin_available (VUNSPEC_<STC>)")

(define_insn "arm_<mcr>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:SI 2 "s_register_operand" "r")
		     (match_operand:SI 3 "immediate_operand" "n")
		     (match_operand:SI 4 "immediate_operand" "n")
		     (match_operand:SI 5 "immediate_operand" "n")] MCRI)
   (use (match_dup 2))]
  "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  arm_const_bounds (operands[4], 0, (1 << 5));
  arm_const_bounds (operands[5], 0, 8);
  return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "arm_<mrc>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
			     (match_operand:SI 2 "immediate_operand" "n")
			     (match_operand:SI 3 "immediate_operand" "n")
			     (match_operand:SI 4 "immediate_operand" "n")
			     (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
  "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
{
  arm_const_bounds (operands[1], 0, 16);
  arm_const_bounds (operands[2], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  arm_const_bounds (operands[4], 0, (1 << 5));
  arm_const_bounds (operands[5], 0, 8);
  return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])
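
;; For illustration only (again assuming the ACLE <arm_acle.h> coprocessor
;; intrinsics): a read such as
;;   uint32_t x = __arm_mrc (15, 0, 0, 0, 0);
;; would be expected to use the arm_<mrc> pattern above and emit roughly
;;   mrc	p15, 0, rN, CR0, CR0, 0
;; with rN standing for whichever register is allocated for X.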

(define_insn "arm_<mcrr>"
  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
		     (match_operand:SI 1 "immediate_operand" "n")
		     (match_operand:DI 2 "s_register_operand" "r")
		     (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
   (use (match_dup 2))]
  "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
{
  arm_const_bounds (operands[0], 0, 16);
  arm_const_bounds (operands[1], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_insn "arm_<mrrc>"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
			     (match_operand:SI 2 "immediate_operand" "n")
			     (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
  "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
{
  arm_const_bounds (operands[1], 0, 16);
  arm_const_bounds (operands[2], 0, 8);
  arm_const_bounds (operands[3], 0, (1 << 5));
  return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
}
  [(set_attr "length" "4")
   (set_attr "type" "coproc")])

(define_expand "speculation_barrier"
  [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
  "TARGET_EITHER"
  "
  /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 targets we
     don't have a usable barrier (and probably don't need one in practice).
     But to be safe if such code is run on later architectures, call a
     helper function in libgcc that will do the right thing for the active
     system.  */
  if (!(arm_arch7 || arm_arch8))
    {
      arm_emit_speculation_barrier_function ();
      DONE;
    }
  "
)

;; Generate a hard speculation barrier when we have not enabled speculation
;; tracking.
(define_insn "*speculation_barrier_insn"
  [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
  "arm_arch7 || arm_arch8"
  "isb\;dsb\\tsy"
  [(set_attr "type" "block")
   (set_attr "length" "8")]
)

;; Vector bits common to IWMMXT, Neon and MVE
(include "vec-common.md")
;; Load the Intel Wireless Multimedia Extension patterns
(include "iwmmxt.md")
;; Load the VFP co-processor patterns
(include "vfp.md")
;; Thumb-1 patterns
(include "thumb1.md")
;; Thumb-2 patterns
(include "thumb2.md")
;; Neon patterns
(include "neon.md")
;; Crypto patterns
(include "crypto.md")
;; Synchronization Primitives
(include "sync.md")
;; Fixed-point patterns
(include "arm-fixed.md")
;; M-profile Vector Extension
(include "mve.md")
