1;; AltiVec patterns.
2;; Copyright (C) 2002-2014 Free Software Foundation, Inc.
3;; Contributed by Aldy Hernandez (aldy@quesejoda.com)
4
5;; This file is part of GCC.
6
7;; GCC is free software; you can redistribute it and/or modify it
8;; under the terms of the GNU General Public License as published
9;; by the Free Software Foundation; either version 3, or (at your
10;; option) any later version.
11
12;; GCC is distributed in the hope that it will be useful, but WITHOUT
13;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15;; License for more details.
16
17;; You should have received a copy of the GNU General Public License
18;; along with GCC; see the file COPYING3.  If not see
19;; <http://www.gnu.org/licenses/>.
20
;; UNSPEC identifiers for the AltiVec patterns in this file.  Each name
;; stands for an operation (usually a single hardware instruction) that
;; has no natural RTL representation, so the patterns wrap it in
;; (unspec ... UNSPEC_FOO).  The "_DIRECT" variants bypass the
;; endian-adjusting expanders and always emit the raw instruction.
(define_c_enum "unspec"
  [UNSPEC_VCMPBFP
   UNSPEC_VMSUMU
   UNSPEC_VMSUMM
   UNSPEC_VMSUMSHM
   UNSPEC_VMSUMUHS
   UNSPEC_VMSUMSHS
   UNSPEC_VMHADDSHS
   UNSPEC_VMHRADDSHS
   UNSPEC_VMLADDUHM
   UNSPEC_VADDCUW
   UNSPEC_VADDU
   UNSPEC_VADDS
   UNSPEC_VAVGU
   UNSPEC_VAVGS
   UNSPEC_VMULEUB
   UNSPEC_VMULESB
   UNSPEC_VMULEUH
   UNSPEC_VMULESH
   UNSPEC_VMULOUB
   UNSPEC_VMULOSB
   UNSPEC_VMULOUH
   UNSPEC_VMULOSH
   UNSPEC_VPKPX
   UNSPEC_VPACK_SIGN_SIGN_SAT
   UNSPEC_VPACK_SIGN_UNS_SAT
   UNSPEC_VPACK_UNS_UNS_SAT
   UNSPEC_VPACK_UNS_UNS_MOD
   UNSPEC_VPACK_UNS_UNS_MOD_DIRECT
   UNSPEC_VSLV4SI
   UNSPEC_VSLO
   UNSPEC_VSR
   UNSPEC_VSRO
   UNSPEC_VSUBCUW
   UNSPEC_VSUBU
   UNSPEC_VSUBS
   UNSPEC_VSUM4UBS
   UNSPEC_VSUM4S
   UNSPEC_VSUM2SWS
   UNSPEC_VSUMSWS
   UNSPEC_VPERM
   UNSPEC_VPERM_UNS
   UNSPEC_VRFIN
   UNSPEC_VCFUX
   UNSPEC_VCFSX
   UNSPEC_VCTUXS
   UNSPEC_VCTSXS
   UNSPEC_VLOGEFP
   UNSPEC_VEXPTEFP
   UNSPEC_VLSDOI
   UNSPEC_VUNPACK_HI_SIGN
   UNSPEC_VUNPACK_LO_SIGN
   UNSPEC_VUNPACK_HI_SIGN_DIRECT
   UNSPEC_VUNPACK_LO_SIGN_DIRECT
   UNSPEC_VUPKHPX
   UNSPEC_VUPKLPX
   UNSPEC_DST
   UNSPEC_DSTT
   UNSPEC_DSTST
   UNSPEC_DSTSTT
   UNSPEC_LVSL
   UNSPEC_LVSR
   UNSPEC_LVE
   UNSPEC_STVX
   UNSPEC_STVXL
   UNSPEC_STVE
   UNSPEC_SET_VSCR
   UNSPEC_GET_VRSAVE
   UNSPEC_LVX
   UNSPEC_REDUC_PLUS
   UNSPEC_VECSH
   UNSPEC_EXTEVEN_V4SI
   UNSPEC_EXTEVEN_V8HI
   UNSPEC_EXTEVEN_V16QI
   UNSPEC_EXTEVEN_V4SF
   UNSPEC_EXTODD_V4SI
   UNSPEC_EXTODD_V8HI
   UNSPEC_EXTODD_V16QI
   UNSPEC_EXTODD_V4SF
   UNSPEC_INTERHI_V4SI
   UNSPEC_INTERHI_V8HI
   UNSPEC_INTERHI_V16QI
   UNSPEC_INTERLO_V4SI
   UNSPEC_INTERLO_V8HI
   UNSPEC_INTERLO_V16QI
   UNSPEC_LVLX
   UNSPEC_LVLXL
   UNSPEC_LVRX
   UNSPEC_LVRXL
   UNSPEC_STVLX
   UNSPEC_STVLXL
   UNSPEC_STVRX
   UNSPEC_STVRXL
   UNSPEC_VMULWHUB
   UNSPEC_VMULWLUB
   UNSPEC_VMULWHSB
   UNSPEC_VMULWLSB
   UNSPEC_VMULWHUH
   UNSPEC_VMULWLUH
   UNSPEC_VMULWHSH
   UNSPEC_VMULWLSH
   UNSPEC_VUPKHUB
   UNSPEC_VUPKHUH
   UNSPEC_VUPKLUB
   UNSPEC_VUPKLUH
   UNSPEC_VPERMSI
   UNSPEC_VPERMHI
   UNSPEC_INTERHI
   UNSPEC_INTERLO
   UNSPEC_VUPKHS_V4SF
   UNSPEC_VUPKLS_V4SF
   UNSPEC_VUPKHU_V4SF
   UNSPEC_VUPKLU_V4SF
   UNSPEC_VGBBD
   UNSPEC_VMRGH_DIRECT
   UNSPEC_VMRGL_DIRECT
   UNSPEC_VSPLT_DIRECT
   UNSPEC_VSUMSWS_DIRECT
   UNSPEC_VADDCUQ
   UNSPEC_VADDEUQM
   UNSPEC_VADDECUQ
   UNSPEC_VSUBCUQ
   UNSPEC_VSUBEUQM
   UNSPEC_VSUBECUQ
   UNSPEC_VBPERMQ
   UNSPEC_BCDADD
   UNSPEC_BCDSUB
   UNSPEC_BCD_OVERFLOW
])
150
;; Volatile UNSPEC identifiers: operations with side effects that must
;; not be CSEd, moved, or deleted (VRSAVE/VSCR updates, data-stream
;; stop instructions).
(define_c_enum "unspecv"
  [UNSPECV_SET_VRSAVE
   UNSPECV_MTVSCR
   UNSPECV_MFVSCR
   UNSPECV_DSSALL
   UNSPECV_DSS
  ])
158
;; Vec int modes
(define_mode_iterator VI [V4SI V8HI V16QI])
;; Like VI, but add ISA 2.07 integer vector ops
(define_mode_iterator VI2 [V4SI V8HI V16QI V2DI])
;; Short vec in modes
(define_mode_iterator VIshort [V8HI V16QI])
;; Vec float modes
(define_mode_iterator VF [V4SF])
;; Vec modes, pity mode iterators are not composable
(define_mode_iterator V [V4SI V8HI V16QI V4SF])
;; Vec modes for move/logical/permute ops, include vector types for move not
;; otherwise handled by altivec (v2df, v2di, ti)
(define_mode_iterator VM [V4SI V8HI V16QI V4SF V2DF V2DI V1TI TI])

;; Like VM, except don't do TImode
(define_mode_iterator VM2 [V4SI V8HI V16QI V4SF V2DF V2DI V1TI])

;; Element-size mnemonic letter used to build instruction names
;; (e.g. vaddu<VI_char>m -> vadduwm for V4SI).
(define_mode_attr VI_char [(V2DI "d") (V4SI "w") (V8HI "h") (V16QI "b")])
;; Scalar mode corresponding to a vector element.
(define_mode_attr VI_scalar [(V2DI "DI") (V4SI "SI") (V8HI "HI") (V16QI "QI")])
;; Per-mode enabling condition: V2DI ops need the ISA 2.07 (power8)
;; vector unit, the rest need plain AltiVec.
(define_mode_attr VI_unit [(V16QI "VECTOR_UNIT_ALTIVEC_P (V16QImode)")
			   (V8HI "VECTOR_UNIT_ALTIVEC_P (V8HImode)")
			   (V4SI "VECTOR_UNIT_ALTIVEC_P (V4SImode)")
			   (V2DI "VECTOR_UNIT_P8_VECTOR_P (V2DImode)")
			   (V1TI "VECTOR_UNIT_ALTIVEC_P (V1TImode)")])

;; Vector pack/unpack
(define_mode_iterator VP [V2DI V4SI V8HI])
;; Mode with elements half the width (pack target / unpack source).
(define_mode_attr VP_small [(V2DI "V4SI") (V4SI "V8HI") (V8HI "V16QI")])
(define_mode_attr VP_small_lc [(V2DI "v4si") (V4SI "v8hi") (V8HI "v16qi")])
(define_mode_attr VU_char [(V2DI "w") (V4SI "h") (V8HI "b")])
189
;; Vector move instructions.
;; Alternatives: vector store / vector load / vector-register copy /
;; GPR store / GPR load / GPR copy (split) / easy zero / easy constant.
(define_insn "*altivec_mov<mode>"
  [(set (match_operand:VM2 0 "nonimmediate_operand" "=Z,v,v,*Y,*r,*r,v,v")
	(match_operand:VM2 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
  "VECTOR_MEM_ALTIVEC_P (<MODE>mode)
   && (register_operand (operands[0], <MODE>mode)
       || register_operand (operands[1], <MODE>mode))"
{
  switch (which_alternative)
    {
    case 0: return "stvx %1,%y0";	/* store vector register */
    case 1: return "lvx %0,%y1";	/* load vector register */
    case 2: return "vor %0,%1,%1";	/* vreg-to-vreg copy */
    case 3: return "#";			/* GPR <-> mem: split later */
    case 4: return "#";
    case 5: return "#";
    case 6: return "vxor %0,%0,%0";	/* easy zero constant */
    case 7: return output_vec_const_move (operands);  /* other easy consts */
    default: gcc_unreachable ();
    }
}
  [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*")])
212
;; Unlike other altivec moves, allow the GPRs, since a normal use of TImode
;; is for unions.  However for plain data movement, slightly favor the vector
;; loads
;; NOTE(review): intentionally parallels *altivec_mov<mode> above, but with
;; "?" instead of "*" on the GPR alternatives so reload may still use them.
(define_insn "*altivec_movti"
  [(set (match_operand:TI 0 "nonimmediate_operand" "=Z,v,v,?Y,?r,?r,v,v")
	(match_operand:TI 1 "input_operand" "v,Z,v,r,Y,r,j,W"))]
  "VECTOR_MEM_ALTIVEC_P (TImode)
   && (register_operand (operands[0], TImode)
       || register_operand (operands[1], TImode))"
{
  switch (which_alternative)
    {
    case 0: return "stvx %1,%y0";
    case 1: return "lvx %0,%y1";
    case 2: return "vor %0,%1,%1";
    case 3: return "#";
    case 4: return "#";
    case 5: return "#";
    case 6: return "vxor %0,%0,%0";
    case 7: return output_vec_const_move (operands);
    default: gcc_unreachable ();
    }
}
  [(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,vecsimple,*")])
237
;; Load up a vector with the most significant bit set by loading up -1 and
;; doing a shift left
;; The trick: splat -1 into every element, then shift each element left
;; by itself.  Since shift counts are taken modulo the element width, a
;; shift by all-ones is a shift by (width - 1), leaving only the MSB set.
(define_split
  [(set (match_operand:VM 0 "altivec_register_operand" "")
	(match_operand:VM 1 "easy_vector_constant_msb" ""))]
  "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
  [(const_int 0)]
{
  rtx dest = operands[0];
  enum machine_mode mode = GET_MODE (operands[0]);
  rtvec v;
  int i, num_elements;

  /* Do the arithmetic in the integer domain; -0.0f has the same bit
     pattern as 0x80000000.  */
  if (mode == V4SFmode)
    {
      mode = V4SImode;
      dest = gen_lowpart (V4SImode, dest);
    }

  num_elements = GET_MODE_NUNITS (mode);
  v = rtvec_alloc (num_elements);
  for (i = 0; i < num_elements; i++)
    RTVEC_ELT (v, i) = constm1_rtx;

  /* dest = {-1, -1, ...}; then dest = dest << dest (elementwise).  */
  emit_insn (gen_vec_initv4si (dest, gen_rtx_PARALLEL (mode, v)));
  emit_insn (gen_rtx_SET (VOIDmode, dest, gen_rtx_ASHIFT (mode, dest, dest)));
  DONE;
})
266
;; Split a constant that is not directly splattable but is twice an easy
;; splat constant: materialize half the value, then add it to itself.
(define_split
  [(set (match_operand:VM 0 "altivec_register_operand" "")
	(match_operand:VM 1 "easy_vector_constant_add_self" ""))]
  "VECTOR_UNIT_ALTIVEC_OR_VSX_P (<MODE>mode) && reload_completed"
  [(set (match_dup 0) (match_dup 3))
   (set (match_dup 0) (match_dup 4))]
{
  rtx dup = gen_easy_altivec_constant (operands[1]);
  rtx const_vec;
  enum machine_mode op_mode = <MODE>mode;

  /* Divide the operand of the resulting VEC_DUPLICATE, and use
     simplify_rtx to make a CONST_VECTOR.  */
  XEXP (dup, 0) = simplify_const_binary_operation (ASHIFTRT, QImode,
						   XEXP (dup, 0), const1_rtx);
  const_vec = simplify_rtx (dup);

  /* Float vectors are handled as their integer bit patterns.  */
  if (op_mode == V4SFmode)
    {
      op_mode = V4SImode;
      operands[0] = gen_lowpart (op_mode, operands[0]);
    }
  if (GET_MODE (const_vec) == op_mode)
    operands[3] = const_vec;
  else
    operands[3] = gen_lowpart (op_mode, const_vec);
  /* Second insn: dest = dest + dest, doubling the halved constant.  */
  operands[4] = gen_rtx_PLUS (op_mode, operands[0], operands[0]);
})
295
;; Read the VRSAVE special register (hard reg 109 in this backend).
;; Darwin's assembler wants the raw SPR number, others accept mfvrsave.
(define_insn "get_vrsave_internal"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(unspec:SI [(reg:SI 109)] UNSPEC_GET_VRSAVE))]
  "TARGET_ALTIVEC"
{
  if (TARGET_MACHO)
     return "mfspr %0,256";
  else
     return "mfvrsave %0";
}
  [(set_attr "type" "*")])
307
;; Write the VRSAVE special register.  Matched as a parallel so the
;; surrounding USEs of the live vector registers stay attached; the
;; write is unspec_volatile because VRSAVE affects context switching.
(define_insn "*set_vrsave_internal"
  [(match_parallel 0 "vrsave_operation"
     [(set (reg:SI 109)
	   (unspec_volatile:SI [(match_operand:SI 1 "register_operand" "r")
				(reg:SI 109)] UNSPECV_SET_VRSAVE))])]
  "TARGET_ALTIVEC"
{
  if (TARGET_MACHO)
    return "mtspr 256,%1";
  else
    return "mtvrsave %1";
}
  [(set_attr "type" "*")])
321
;; Darwin 32-bit "save the world" out-of-line prologue: a single call
;; (clobbering LR, reg 65) to a runtime routine that saves all
;; registers.  The parallel contents are checked by save_world_operation.
(define_insn "*save_world"
 [(match_parallel 0 "save_world_operation"
                  [(clobber (reg:SI 65))
                   (use (match_operand:SI 1 "call_operand" "s"))])]
 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
 "bl %z1"
  [(set_attr "type" "branch")
   (set_attr "length" "4")])
330
;; Darwin 32-bit "restore the world" epilogue: tail-branch to the
;; out-of-line restore routine, which returns to our caller (hence the
;; (return) and plain "b" rather than "bl").
(define_insn "*restore_world"
 [(match_parallel 0 "restore_world_operation"
                  [(return)
		   (use (reg:SI 65))
                   (use (match_operand:SI 1 "call_operand" "s"))
                   (clobber (match_operand:SI 2 "gpc_reg_operand" "=r"))])]
 "TARGET_MACHO && (DEFAULT_ABI == ABI_DARWIN) && TARGET_32BIT"
 "b %z1")
339
;; The save_vregs and restore_vregs patterns don't use memory_operand
;; because (plus (reg) (const_int)) is not a valid vector address.
;; This way is more compact than describing exactly what happens in
;; the out-of-line functions, ie. loading the constant into r11/r12
;; then using indexed addressing, and requires less editing of rtl
;; to describe the operation to dwarf2out_frame_debug_expr.
;; Out-of-line vector-register save, variant clobbering r11.
(define_insn "*save_vregs_<mode>_r11"
  [(match_parallel 0 "any_parallel_operand"
     [(clobber (reg:P 65))
      (use (match_operand:P 1 "symbol_ref_operand" "s"))
      (clobber (reg:P 11))
      (use (reg:P 0))
      (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
			     (match_operand:P 3 "short_cint_operand" "I")))
	   (match_operand:V4SI 4 "gpc_reg_operand" "v"))])]
  ""
  "bl %1"
  [(set_attr "type" "branch")
   (set_attr "length" "4")])
359
;; As *save_vregs_<mode>_r11, but the out-of-line routine uses r12
;; as its scratch register.
(define_insn "*save_vregs_<mode>_r12"
  [(match_parallel 0 "any_parallel_operand"
     [(clobber (reg:P 65))
      (use (match_operand:P 1 "symbol_ref_operand" "s"))
      (clobber (reg:P 12))
      (use (reg:P 0))
      (set (mem:V4SI (plus:P (match_operand:P 2 "gpc_reg_operand" "b")
			     (match_operand:P 3 "short_cint_operand" "I")))
	   (match_operand:V4SI 4 "gpc_reg_operand" "v"))])]
  ""
  "bl %1"
  [(set_attr "type" "branch")
   (set_attr "length" "4")])
373
;; Out-of-line vector-register restore, variant clobbering r11.
(define_insn "*restore_vregs_<mode>_r11"
  [(match_parallel 0 "any_parallel_operand"
     [(clobber (reg:P 65))
      (use (match_operand:P 1 "symbol_ref_operand" "s"))
      (clobber (reg:P 11))
      (use (reg:P 0))
      (set (match_operand:V4SI 2 "gpc_reg_operand" "=v")
	   (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
			     (match_operand:P 4 "short_cint_operand" "I"))))])]
  ""
  "bl %1"
  [(set_attr "type" "branch")
   (set_attr "length" "4")])
387
;; Out-of-line vector-register restore, variant clobbering r12.
(define_insn "*restore_vregs_<mode>_r12"
  [(match_parallel 0 "any_parallel_operand"
     [(clobber (reg:P 65))
      (use (match_operand:P 1 "symbol_ref_operand" "s"))
      (clobber (reg:P 12))
      (use (reg:P 0))
      (set (match_operand:V4SI 2 "gpc_reg_operand" "=v")
	   (mem:V4SI (plus:P (match_operand:P 3 "gpc_reg_operand" "b")
			     (match_operand:P 4 "short_cint_operand" "I"))))])]
  ""
  "bl %1"
  [(set_attr "type" "branch")
   (set_attr "length" "4")])
401
;; Simple binary operations.

;; add
;; Modulo (wrapping) vector add; VI2 adds V2DI for ISA 2.07 targets.
(define_insn "add<mode>3"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (plus:VI2 (match_operand:VI2 1 "register_operand" "v")
		  (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vaddu<VI_char>m %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "*altivec_addv4sf3"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (plus:V4SF (match_operand:V4SF 1 "register_operand" "v")
		   (match_operand:V4SF 2 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vaddfp %0,%1,%2"
  [(set_attr "type" "vecfloat")])

;; Word-add write-carry: each result element is the carry out of the
;; corresponding 32-bit unsigned add.
(define_insn "altivec_vaddcuw"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VADDCUW))]
  "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
  "vaddcuw %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Unsigned saturating add.  The second set models the VSCR (reg 110)
;; SAT-bit side effect so the insn is not deleted as dead.
(define_insn "altivec_vaddu<VI_char>s"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
		    (match_operand:VI 2 "register_operand" "v")]
		   UNSPEC_VADDU))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "<VI_unit>"
  "vaddu<VI_char>s %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Signed saturating add; also sets the VSCR SAT bit.
(define_insn "altivec_vadds<VI_char>s"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
                    (match_operand:VI 2 "register_operand" "v")]
		   UNSPEC_VADDS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
  "vadds<VI_char>s %0,%1,%2"
  [(set_attr "type" "vecsimple")])
449
;; sub
;; Modulo (wrapping) vector subtract; mirrors the add group above.
(define_insn "sub<mode>3"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (minus:VI2 (match_operand:VI2 1 "register_operand" "v")
		   (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vsubu<VI_char>m %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "*altivec_subv4sf3"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (minus:V4SF (match_operand:V4SF 1 "register_operand" "v")
                    (match_operand:V4SF 2 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vsubfp %0,%1,%2"
  [(set_attr "type" "vecfloat")])

;; Word-subtract write-carry (borrow indicator per 32-bit element).
(define_insn "altivec_vsubcuw"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSUBCUW))]
  "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
  "vsubcuw %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Unsigned saturating subtract; the VSCR (reg 110) set models the
;; sticky SAT-bit side effect.
(define_insn "altivec_vsubu<VI_char>s"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
                    (match_operand:VI 2 "register_operand" "v")]
		   UNSPEC_VSUBU))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
  "vsubu<VI_char>s %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Signed saturating subtract; also sets the VSCR SAT bit.
(define_insn "altivec_vsubs<VI_char>s"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
                    (match_operand:VI 2 "register_operand" "v")]
		   UNSPEC_VSUBS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
  "vsubs<VI_char>s %0,%1,%2"
  [(set_attr "type" "vecsimple")])
495
;; Vector average (rounded) — unsigned and signed variants.
(define_insn "altivec_vavgu<VI_char>"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
                    (match_operand:VI 2 "register_operand" "v")]
		   UNSPEC_VAVGU))]
  "TARGET_ALTIVEC"
  "vavgu<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "altivec_vavgs<VI_char>"
  [(set (match_operand:VI 0 "register_operand" "=v")
        (unspec:VI [(match_operand:VI 1 "register_operand" "v")
                    (match_operand:VI 2 "register_operand" "v")]
		   UNSPEC_VAVGS))]
  "VECTOR_UNIT_ALTIVEC_P (<MODE>mode)"
  "vavgs<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])
514
;; Vector compare-bounds float: per-element in-bounds test producing a
;; V4SI mask result.
(define_insn "altivec_vcmpbfp"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
                      (match_operand:V4SF 2 "register_operand" "v")]
                      UNSPEC_VCMPBFP))]
  "VECTOR_UNIT_ALTIVEC_P (V4SImode)"
  "vcmpbfp %0,%1,%2"
  [(set_attr "type" "veccmp")])
523
;; Integer vector comparisons producing all-ones/all-zeros element masks.
(define_insn "*altivec_eq<mode>"
  [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
	(eq:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
		(match_operand:VI2 2 "altivec_register_operand" "v")))]
  "<VI_unit>"
  "vcmpequ<VI_char> %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Signed greater-than.
(define_insn "*altivec_gt<mode>"
  [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
	(gt:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
		(match_operand:VI2 2 "altivec_register_operand" "v")))]
  "<VI_unit>"
  "vcmpgts<VI_char> %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Unsigned greater-than.
(define_insn "*altivec_gtu<mode>"
  [(set (match_operand:VI2 0 "altivec_register_operand" "=v")
	(gtu:VI2 (match_operand:VI2 1 "altivec_register_operand" "v")
		 (match_operand:VI2 2 "altivec_register_operand" "v")))]
  "<VI_unit>"
  "vcmpgtu<VI_char> %0,%1,%2"
  [(set_attr "type" "veccmp")])
547
;; Float vector comparisons; results are V4SF-typed bit masks.
(define_insn "*altivec_eqv4sf"
  [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
	(eq:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpeqfp %0,%1,%2"
  [(set_attr "type" "veccmp")])

(define_insn "*altivec_gtv4sf"
  [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
	(gt:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpgtfp %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Greater-or-equal has a dedicated instruction for floats (unlike the
;; integer comparisons above, which only have EQ/GT/GTU forms).
(define_insn "*altivec_gev4sf"
  [(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
	(ge:V4SF (match_operand:V4SF 1 "altivec_register_operand" "v")
		 (match_operand:V4SF 2 "altivec_register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpgefp %0,%1,%2"
  [(set_attr "type" "veccmp")])
571
;; Vector select: where the mask (operand 1) has bits set, take operand 2,
;; else operand 3.  Note the vsel operand order is reversed relative to
;; the if_then_else arms: vsel picks %2 where the mask is SET, and the
;; RTL's "true" arm corresponds to mask != 0.
(define_insn "*altivec_vsel<mode>"
  [(set (match_operand:VM 0 "altivec_register_operand" "=v")
	(if_then_else:VM
	 (ne:CC (match_operand:VM 1 "altivec_register_operand" "v")
		(match_operand:VM 4 "zero_constant" ""))
	 (match_operand:VM 2 "altivec_register_operand" "v")
	 (match_operand:VM 3 "altivec_register_operand" "v")))]
  "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
  "vsel %0,%3,%2,%1"
  [(set_attr "type" "vecperm")])

;; Same selection with an unsigned comparison mode on the mask test.
(define_insn "*altivec_vsel<mode>_uns"
  [(set (match_operand:VM 0 "altivec_register_operand" "=v")
	(if_then_else:VM
	 (ne:CCUNS (match_operand:VM 1 "altivec_register_operand" "v")
		   (match_operand:VM 4 "zero_constant" ""))
	 (match_operand:VM 2 "altivec_register_operand" "v")
	 (match_operand:VM 3 "altivec_register_operand" "v")))]
  "VECTOR_MEM_ALTIVEC_P (<MODE>mode)"
  "vsel %0,%3,%2,%1"
  [(set_attr "type" "vecperm")])
593
;; Fused multiply add.

(define_insn "*altivec_fmav4sf4"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
	(fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
		  (match_operand:V4SF 2 "register_operand" "v")
		  (match_operand:V4SF 3 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vmaddfp %0,%1,%2,%3"
  [(set_attr "type" "vecfloat")])

;; We do multiply as a fused multiply-add with an add of a -0.0 vector.
;; (-0.0 rather than +0.0 so that x*y + (-0.0) == x*y even when the
;; product is a negative zero.)

(define_expand "altivec_mulv4sf3"
  [(set (match_operand:V4SF 0 "register_operand" "")
	(fma:V4SF (match_operand:V4SF 1 "register_operand" "")
		  (match_operand:V4SF 2 "register_operand" "")
		  (match_dup 3)))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
{
  rtx neg0;

  /* Generate [-0.0, -0.0, -0.0, -0.0].  */
  /* Splat -1 then shift each word left by 31 (shift count taken mod 32),
     yielding 0x80000000 per element, the bit pattern of -0.0f.  */
  neg0 = gen_reg_rtx (V4SImode);
  emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
  emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));

  operands[3] = gen_lowpart (V4SFmode, neg0);
})
623
;; 32-bit integer multiplication
;; A_high = Operand_0 & 0xFFFF0000 >> 16
;; A_low = Operand_0 & 0xFFFF
;; B_high = Operand_1 & 0xFFFF0000 >> 16
;; B_low = Operand_1 & 0xFFFF
;; result = A_low * B_low + (A_high * B_low + B_high * A_low) << 16

;; (define_insn "mulv4si3"
;;   [(set (match_operand:V4SI 0 "register_operand" "=v")
;;         (mult:V4SI (match_operand:V4SI 1 "register_operand" "v")
;;                    (match_operand:V4SI 2 "register_operand" "v")))]
;; Synthesized from 16x16->32 multiplies: vmulouh gives the low
;; products, vmsumuhm accumulates the cross products, which are then
;; shifted into the high halfword and added in.
(define_expand "mulv4si3"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V4SI 1 "register_operand" ""))
   (use (match_operand:V4SI 2 "register_operand" ""))]
   "TARGET_ALTIVEC"
   "
 {
   rtx zero;
   rtx swap;
   rtx small_swap;
   rtx sixteen;
   rtx one;
   rtx two;
   rtx low_product;
   rtx high_product;

   zero = gen_reg_rtx (V4SImode);
   emit_insn (gen_altivec_vspltisw (zero, const0_rtx));

   /* vspltisw of -16; rotate counts are mod 32, so -16 == 16.
      NOTE(review): gen_rtx_CONST_INT conventionally takes VOIDmode,
      not V4SImode — works here but worth confirming/cleaning up.  */
   sixteen = gen_reg_rtx (V4SImode);
   emit_insn (gen_altivec_vspltisw (sixteen,  gen_rtx_CONST_INT (V4SImode, -16)));

   /* swap = operand2 with high/low halfwords exchanged in each word.  */
   swap = gen_reg_rtx (V4SImode);
   emit_insn (gen_vrotlv4si3 (swap, operands[2], sixteen));

   one = gen_reg_rtx (V8HImode);
   convert_move (one, operands[1], 0);

   two = gen_reg_rtx (V8HImode);
   convert_move (two, operands[2], 0);

   small_swap = gen_reg_rtx (V8HImode);
   convert_move (small_swap, swap, 0);

   /* low_product = A_low * B_low (odd halfword products).  */
   low_product = gen_reg_rtx (V4SImode);
   emit_insn (gen_altivec_vmulouh (low_product, one, two));

   /* high_product = A_high * B_low + A_low * B_high (cross terms).  */
   high_product = gen_reg_rtx (V4SImode);
   emit_insn (gen_altivec_vmsumuhm (high_product, one, small_swap, zero));

   emit_insn (gen_vashlv4si3 (high_product, high_product, sixteen));

   emit_insn (gen_addv4si3 (operands[0], high_product, low_product));

   DONE;
 }")
681
;; V8HI multiply: form the full 32-bit even/odd products, merge them
;; back into element order, then pack the low halves.  The operand
;; roles swap under little-endian because vmulesh/vmulosh and the
;; merge instructions are defined in big-endian element terms.
(define_expand "mulv8hi3"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:V8HI 2 "register_operand" ""))]
   "TARGET_ALTIVEC"
   "
{
   rtx odd = gen_reg_rtx (V4SImode);
   rtx even = gen_reg_rtx (V4SImode);
   rtx high = gen_reg_rtx (V4SImode);
   rtx low = gen_reg_rtx (V4SImode);

   if (BYTES_BIG_ENDIAN)
     {
       emit_insn (gen_altivec_vmulesh (even, operands[1], operands[2]));
       emit_insn (gen_altivec_vmulosh (odd, operands[1], operands[2]));
       emit_insn (gen_altivec_vmrghw_direct (high, even, odd));
       emit_insn (gen_altivec_vmrglw_direct (low, even, odd));
       emit_insn (gen_altivec_vpkuwum_direct (operands[0], high, low));
     }
   else
     {
       emit_insn (gen_altivec_vmulosh (even, operands[1], operands[2]));
       emit_insn (gen_altivec_vmulesh (odd, operands[1], operands[2]));
       emit_insn (gen_altivec_vmrghw_direct (high, odd, even));
       emit_insn (gen_altivec_vmrglw_direct (low, odd, even));
       emit_insn (gen_altivec_vpkuwum_direct (operands[0], low, high));
     }

   DONE;
}")
713
;; Fused multiply subtract
;; vnmsubfp computes -(a*b - c), expressed here as -(fma (a, b, -c)).
(define_insn "*altivec_vnmsubfp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
	(neg:V4SF
	 (fma:V4SF (match_operand:V4SF 1 "register_operand" "v")
		   (match_operand:V4SF 2 "register_operand" "v")
		   (neg:V4SF
		    (match_operand:V4SF 3 "register_operand" "v")))))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vnmsubfp %0,%1,%2,%3"
  [(set_attr "type" "vecfloat")])
725
;; Multiply-sum instructions: per-word dot products of sub-word elements
;; accumulated into operand 3.

;; Unsigned multiply-sum modulo (byte or halfword elements).
(define_insn "altivec_vmsumu<VI_char>m"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
		      (match_operand:VIshort 2 "register_operand" "v")
                      (match_operand:V4SI 3 "register_operand" "v")]
		     UNSPEC_VMSUMU))]
  "TARGET_ALTIVEC"
  "vmsumu<VI_char>m %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])

;; Mixed-sign multiply-sum modulo.
(define_insn "altivec_vmsumm<VI_char>m"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
		      (match_operand:VIshort 2 "register_operand" "v")
                      (match_operand:V4SI 3 "register_operand" "v")]
		     UNSPEC_VMSUMM))]
  "TARGET_ALTIVEC"
  "vmsumm<VI_char>m %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])

;; Signed halfword multiply-sum modulo.
(define_insn "altivec_vmsumshm"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
		      (match_operand:V8HI 2 "register_operand" "v")
                      (match_operand:V4SI 3 "register_operand" "v")]
		     UNSPEC_VMSUMSHM))]
  "TARGET_ALTIVEC"
  "vmsumshm %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])

;; Saturating variants below also model the VSCR (reg 110) SAT bit.
(define_insn "altivec_vmsumuhs"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
		      (match_operand:V8HI 2 "register_operand" "v")
                      (match_operand:V4SI 3 "register_operand" "v")]
		     UNSPEC_VMSUMUHS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vmsumuhs %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmsumshs"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
		      (match_operand:V8HI 2 "register_operand" "v")
                      (match_operand:V4SI 3 "register_operand" "v")]
		     UNSPEC_VMSUMSHS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vmsumshs %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])
777
;; max

(define_insn "umax<mode>3"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (umax:VI2 (match_operand:VI2 1 "register_operand" "v")
		  (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vmaxu<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "smax<mode>3"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (smax:VI2 (match_operand:VI2 1 "register_operand" "v")
		  (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vmaxs<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "*altivec_smaxv4sf3"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (smax:V4SF (match_operand:V4SF 1 "register_operand" "v")
                   (match_operand:V4SF 2 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vmaxfp %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; min

(define_insn "umin<mode>3"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (umin:VI2 (match_operand:VI2 1 "register_operand" "v")
		  (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vminu<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "smin<mode>3"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (smin:VI2 (match_operand:VI2 1 "register_operand" "v")
		  (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vmins<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "*altivec_sminv4sf3"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (smin:V4SF (match_operand:V4SF 1 "register_operand" "v")
                   (match_operand:V4SF 2 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vminfp %0,%1,%2"
  [(set_attr "type" "veccmp")])
827
;; Halfword multiply-high-and-add family.  The saturating forms model
;; the VSCR (reg 110) SAT-bit side effect.
(define_insn "altivec_vmhaddshs"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
		      (match_operand:V8HI 2 "register_operand" "v")
                      (match_operand:V8HI 3 "register_operand" "v")]
		     UNSPEC_VMHADDSHS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vmhaddshs %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])

;; Rounded variant of vmhaddshs.
(define_insn "altivec_vmhraddshs"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
		      (match_operand:V8HI 2 "register_operand" "v")
                      (match_operand:V8HI 3 "register_operand" "v")]
		     UNSPEC_VMHRADDSHS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vmhraddshs %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])

;; Multiply-low-and-add modulo: no saturation, so no VSCR side effect.
(define_insn "altivec_vmladduhm"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
		      (match_operand:V8HI 2 "register_operand" "v")
                      (match_operand:V8HI 3 "register_operand" "v")]
		     UNSPEC_VMLADDUHM))]
  "TARGET_ALTIVEC"
  "vmladduhm %0,%1,%2,%3"
  [(set_attr "type" "veccomplex")])
859
;; Merge-high bytes: interleave the high halves of the two inputs.
;; Expanded as a vec_select over the concatenation so the optimizers can
;; see the shuffle; for LE with -maltivec=be the element numbering and
;; operand order are adjusted to get big-endian merge semantics.
(define_expand "altivec_vmrghb"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:V16QI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
                     GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
		     GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
		     GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
      x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
    }
  else
    {
      v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
                     GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
		     GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
		     GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
      x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
    }

  x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})
891
892(define_insn "*altivec_vmrghb_internal"
893  [(set (match_operand:V16QI 0 "register_operand" "=v")
894        (vec_select:V16QI
895	  (vec_concat:V32QI
896	    (match_operand:V16QI 1 "register_operand" "v")
897	    (match_operand:V16QI 2 "register_operand" "v"))
898	  (parallel [(const_int 0) (const_int 16)
899		     (const_int 1) (const_int 17)
900		     (const_int 2) (const_int 18)
901		     (const_int 3) (const_int 19)
902		     (const_int 4) (const_int 20)
903		     (const_int 5) (const_int 21)
904		     (const_int 6) (const_int 22)
905		     (const_int 7) (const_int 23)])))]
906  "TARGET_ALTIVEC"
907{
908  if (BYTES_BIG_ENDIAN)
909    return "vmrghb %0,%1,%2";
910  else
911    return "vmrglb %0,%2,%1";
912}
913  [(set_attr "type" "vecperm")])
914
915(define_insn "altivec_vmrghb_direct"
916  [(set (match_operand:V16QI 0 "register_operand" "=v")
917        (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
918                       (match_operand:V16QI 2 "register_operand" "v")]
919		      UNSPEC_VMRGH_DIRECT))]
920  "TARGET_ALTIVEC"
921  "vmrghb %0,%1,%2"
922  [(set_attr "type" "vecperm")])
923
;; Merge-high halfword (vmrghh).  Same endian scheme as vmrghb: for LE
;; with -maltivec=be the indices select the high half and the inputs
;; are swapped so the RTL describes the big-endian result.
(define_expand "altivec_vmrghh"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:V8HI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
                     GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
      x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
    }
  else
    {
      v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
                     GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
      x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
    }

  x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

;; Matcher for canonical merge-high-halfword RTL; on LE this is vmrglh
;; with swapped operands.
(define_insn "*altivec_vmrghh_internal"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (vec_select:V8HI
	  (vec_concat:V16HI
	    (match_operand:V8HI 1 "register_operand" "v")
	    (match_operand:V8HI 2 "register_operand" "v"))
	  (parallel [(const_int 0) (const_int 8)
		     (const_int 1) (const_int 9)
		     (const_int 2) (const_int 10)
		     (const_int 3) (const_int 11)])))]
  "TARGET_ALTIVEC"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrghh %0,%1,%2";
  else
    return "vmrglh %0,%2,%1";
}
  [(set_attr "type" "vecperm")])

;; Raw vmrghh with no endian adjustment.
(define_insn "altivec_vmrghh_direct"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMRGH_DIRECT))]
  "TARGET_ALTIVEC"
  "vmrghh %0,%1,%2"
  [(set_attr "type" "vecperm")])
979
;; Merge-high word (vmrghw).  Same endian scheme as vmrghb/vmrghh.
;; Note the expand/internal patterns are conditioned on
;; VECTOR_MEM_ALTIVEC_P while the direct form only needs TARGET_ALTIVEC.
(define_expand "altivec_vmrghw"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V4SI 1 "register_operand" ""))
   (use (match_operand:V4SI 2 "register_operand" ""))]
  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
      x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
    }
  else
    {
      v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
      x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
    }

  x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

;; Matcher for canonical merge-high-word RTL; on LE this is vmrglw
;; with swapped operands.
(define_insn "*altivec_vmrghw_internal"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (vec_select:V4SI
	  (vec_concat:V8SI
	    (match_operand:V4SI 1 "register_operand" "v")
	    (match_operand:V4SI 2 "register_operand" "v"))
	  (parallel [(const_int 0) (const_int 4)
		     (const_int 1) (const_int 5)])))]
  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrghw %0,%1,%2";
  else
    return "vmrglw %0,%2,%1";
}
  [(set_attr "type" "vecperm")])

;; Raw vmrghw with no endian adjustment.
(define_insn "altivec_vmrghw_direct"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
                     UNSPEC_VMRGH_DIRECT))]
  "TARGET_ALTIVEC"
  "vmrghw %0,%1,%2"
  [(set_attr "type" "vecperm")])
1031
;; V4SF variant of merge-high word: same vmrghw/vmrglw instruction,
;; matched on float vector modes.
(define_insn "*altivec_vmrghsf"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (vec_select:V4SF
	  (vec_concat:V8SF
	    (match_operand:V4SF 1 "register_operand" "v")
	    (match_operand:V4SF 2 "register_operand" "v"))
	  (parallel [(const_int 0) (const_int 4)
		     (const_int 1) (const_int 5)])))]
  "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrghw %0,%1,%2";
  else
    return "vmrglw %0,%2,%1";
}
  [(set_attr "type" "vecperm")])
1048
;; Merge-low byte (vmrglb): mirror image of vmrghb.  For LE with
;; -maltivec=be the indices select from the low half and the inputs
;; are swapped so the RTL describes the big-endian result.
(define_expand "altivec_vmrglb"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:V16QI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      v = gen_rtvec (16, GEN_INT (0), GEN_INT (16), GEN_INT (1), GEN_INT (17),
                     GEN_INT (2), GEN_INT (18), GEN_INT (3), GEN_INT (19),
		     GEN_INT (4), GEN_INT (20), GEN_INT (5), GEN_INT (21),
		     GEN_INT (6), GEN_INT (22), GEN_INT (7), GEN_INT (23));
      x = gen_rtx_VEC_CONCAT (V32QImode, operands[2], operands[1]);
    }
  else
    {
      v = gen_rtvec (16, GEN_INT (8), GEN_INT (24), GEN_INT (9), GEN_INT (25),
                     GEN_INT (10), GEN_INT (26), GEN_INT (11), GEN_INT (27),
		     GEN_INT (12), GEN_INT (28), GEN_INT (13), GEN_INT (29),
		     GEN_INT (14), GEN_INT (30), GEN_INT (15), GEN_INT (31));
      x = gen_rtx_VEC_CONCAT (V32QImode, operands[1], operands[2]);
    }

  x = gen_rtx_VEC_SELECT (V16QImode, x, gen_rtx_PARALLEL (VOIDmode, v));
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

;; Matcher for canonical merge-low-byte RTL; on LE this is vmrghb
;; with swapped operands.
(define_insn "*altivec_vmrglb_internal"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (vec_select:V16QI
	  (vec_concat:V32QI
	    (match_operand:V16QI 1 "register_operand" "v")
	    (match_operand:V16QI 2 "register_operand" "v"))
	  (parallel [(const_int  8) (const_int 24)
		     (const_int  9) (const_int 25)
		     (const_int 10) (const_int 26)
		     (const_int 11) (const_int 27)
		     (const_int 12) (const_int 28)
		     (const_int 13) (const_int 29)
		     (const_int 14) (const_int 30)
		     (const_int 15) (const_int 31)])))]
  "TARGET_ALTIVEC"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrglb %0,%1,%2";
  else
    return "vmrghb %0,%2,%1";
}
  [(set_attr "type" "vecperm")])

;; Raw vmrglb with no endian adjustment.
(define_insn "altivec_vmrglb_direct"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
    		       (match_operand:V16QI 2 "register_operand" "v")]
                      UNSPEC_VMRGL_DIRECT))]
  "TARGET_ALTIVEC"
  "vmrglb %0,%1,%2"
  [(set_attr "type" "vecperm")])
1112
;; Merge-low halfword (vmrglh): mirror image of vmrghh with the same
;; endian handling.
(define_expand "altivec_vmrglh"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:V8HI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      v = gen_rtvec (8, GEN_INT (0), GEN_INT (8), GEN_INT (1), GEN_INT (9),
                     GEN_INT (2), GEN_INT (10), GEN_INT (3), GEN_INT (11));
      x = gen_rtx_VEC_CONCAT (V16HImode, operands[2], operands[1]);
    }
  else
    {
      v = gen_rtvec (8, GEN_INT (4), GEN_INT (12), GEN_INT (5), GEN_INT (13),
                     GEN_INT (6), GEN_INT (14), GEN_INT (7), GEN_INT (15));
      x = gen_rtx_VEC_CONCAT (V16HImode, operands[1], operands[2]);
    }

  x = gen_rtx_VEC_SELECT (V8HImode, x, gen_rtx_PARALLEL (VOIDmode, v));
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

;; Matcher for canonical merge-low-halfword RTL; on LE this is vmrghh
;; with swapped operands.
(define_insn "*altivec_vmrglh_internal"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (vec_select:V8HI
	  (vec_concat:V16HI
	    (match_operand:V8HI 1 "register_operand" "v")
	    (match_operand:V8HI 2 "register_operand" "v"))
	  (parallel [(const_int 4) (const_int 12)
		     (const_int 5) (const_int 13)
		     (const_int 6) (const_int 14)
		     (const_int 7) (const_int 15)])))]
  "TARGET_ALTIVEC"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrglh %0,%1,%2";
  else
    return "vmrghh %0,%2,%1";
}
  [(set_attr "type" "vecperm")])

;; Raw vmrglh with no endian adjustment.
(define_insn "altivec_vmrglh_direct"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
		      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMRGL_DIRECT))]
  "TARGET_ALTIVEC"
  "vmrglh %0,%1,%2"
  [(set_attr "type" "vecperm")])
1168
;; Merge-low word (vmrglw): mirror image of vmrghw with the same
;; endian handling and VECTOR_MEM_ALTIVEC_P condition.
(define_expand "altivec_vmrglw"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V4SI 1 "register_operand" ""))
   (use (match_operand:V4SI 2 "register_operand" ""))]
  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      v = gen_rtvec (4, GEN_INT (0), GEN_INT (4), GEN_INT (1), GEN_INT (5));
      x = gen_rtx_VEC_CONCAT (V8SImode, operands[2], operands[1]);
    }
  else
    {
      v = gen_rtvec (4, GEN_INT (2), GEN_INT (6), GEN_INT (3), GEN_INT (7));
      x = gen_rtx_VEC_CONCAT (V8SImode, operands[1], operands[2]);
    }

  x = gen_rtx_VEC_SELECT (V4SImode, x, gen_rtx_PARALLEL (VOIDmode, v));
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

;; Matcher for canonical merge-low-word RTL; on LE this is vmrghw
;; with swapped operands.
(define_insn "*altivec_vmrglw_internal"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (vec_select:V4SI
	  (vec_concat:V8SI
	    (match_operand:V4SI 1 "register_operand" "v")
	    (match_operand:V4SI 2 "register_operand" "v"))
	  (parallel [(const_int 2) (const_int 6)
		     (const_int 3) (const_int 7)])))]
  "VECTOR_MEM_ALTIVEC_P (V4SImode)"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrglw %0,%1,%2";
  else
    return "vmrghw %0,%2,%1";
}
  [(set_attr "type" "vecperm")])

;; Raw vmrglw with no endian adjustment.
(define_insn "altivec_vmrglw_direct"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
	              (match_operand:V4SI 2 "register_operand" "v")]
                     UNSPEC_VMRGL_DIRECT))]
  "TARGET_ALTIVEC"
  "vmrglw %0,%1,%2"
  [(set_attr "type" "vecperm")])
1220
;; V4SF variant of merge-low word: same vmrglw/vmrghw instruction,
;; matched on float vector modes.
(define_insn "*altivec_vmrglsf"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (vec_select:V4SF
	 (vec_concat:V8SF
	   (match_operand:V4SF 1 "register_operand" "v")
	   (match_operand:V4SF 2 "register_operand" "v"))
	 (parallel [(const_int 2) (const_int 6)
		    (const_int 3) (const_int 7)])))]
  "VECTOR_MEM_ALTIVEC_P (V4SFmode)"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrglw %0,%1,%2";
  else
    return "vmrghw %0,%2,%1";
}
  [(set_attr "type" "vecperm")])
1237
;; Power8 vector merge even/odd
;; On LE, merge-even of swapped operands implements merge-odd and vice
;; versa, so each pattern selects the opposite mnemonic for LE.
(define_insn "p8_vmrgew"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(vec_select:V4SI
	  (vec_concat:V8SI
	    (match_operand:V4SI 1 "register_operand" "v")
	    (match_operand:V4SI 2 "register_operand" "v"))
	  (parallel [(const_int 0) (const_int 4)
		     (const_int 2) (const_int 6)])))]
  "TARGET_P8_VECTOR"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrgew %0,%1,%2";
  else
    return "vmrgow %0,%2,%1";
}
  [(set_attr "type" "vecperm")])

;; Merge odd words; dual of p8_vmrgew above.
(define_insn "p8_vmrgow"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(vec_select:V4SI
	  (vec_concat:V8SI
	    (match_operand:V4SI 1 "register_operand" "v")
	    (match_operand:V4SI 2 "register_operand" "v"))
	  (parallel [(const_int 1) (const_int 5)
		     (const_int 3) (const_int 7)])))]
  "TARGET_P8_VECTOR"
{
  if (BYTES_BIG_ENDIAN)
    return "vmrgow %0,%1,%2";
  else
    return "vmrgew %0,%2,%1";
}
  [(set_attr "type" "vecperm")])
1272
;; Widening multiply even/odd expanders.  "Even"/"odd" refer to
;; element positions in the canonical (BE-numbered) vector order; when
;; the element order is little-endian the hardware's even-multiply
;; instruction actually computes the odd results and vice versa, so
;; each expander picks the opposite vmule/vmulo insn for that case.
(define_expand "vec_widen_umult_even_v16qi"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:V16QI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "vec_widen_smult_even_v16qi"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:V16QI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "vec_widen_umult_even_v8hi"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:V8HI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "vec_widen_smult_even_v8hi"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:V8HI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "vec_widen_umult_odd_v16qi"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:V16QI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmuloub (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmuleub (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "vec_widen_smult_odd_v16qi"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:V16QI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmulosb (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmulesb (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "vec_widen_umult_odd_v8hi"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:V8HI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmulouh (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmuleuh (operands[0], operands[1], operands[2]));
  DONE;
})

(define_expand "vec_widen_smult_odd_v8hi"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:V8HI 2 "register_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_vmulosh (operands[0], operands[1], operands[2]));
  else
    emit_insn (gen_altivec_vmulesh (operands[0], operands[1], operands[2]));
  DONE;
})
1376
;; Raw widening-multiply insns (even/odd, signed/unsigned, byte and
;; halfword).  These emit the hardware instruction directly with no
;; endian adjustment; the vec_widen_* expanders above choose between
;; them based on element order.
(define_insn "altivec_vmuleub"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
		     UNSPEC_VMULEUB))]
  "TARGET_ALTIVEC"
  "vmuleub %0,%1,%2"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmuloub"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
		     UNSPEC_VMULOUB))]
  "TARGET_ALTIVEC"
  "vmuloub %0,%1,%2"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmulesb"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
		     UNSPEC_VMULESB))]
  "TARGET_ALTIVEC"
  "vmulesb %0,%1,%2"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmulosb"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
		     UNSPEC_VMULOSB))]
  "TARGET_ALTIVEC"
  "vmulosb %0,%1,%2"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmuleuh"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
		     UNSPEC_VMULEUH))]
  "TARGET_ALTIVEC"
  "vmuleuh %0,%1,%2"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmulouh"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
		     UNSPEC_VMULOUH))]
  "TARGET_ALTIVEC"
  "vmulouh %0,%1,%2"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmulesh"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
		     UNSPEC_VMULESH))]
  "TARGET_ALTIVEC"
  "vmulesh %0,%1,%2"
  [(set_attr "type" "veccomplex")])

(define_insn "altivec_vmulosh"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
		     UNSPEC_VMULOSH))]
  "TARGET_ALTIVEC"
  "vmulosh %0,%1,%2"
  [(set_attr "type" "veccomplex")])
1448
1449
;; Vector pack/unpack
;; Pack instructions take their inputs high-to-low, so for a
;; little-endian element order the two source operands are swapped in
;; the output template.  The _direct form keys off BYTES_BIG_ENDIAN
;; instead of VECTOR_ELT_ORDER_BIG.
(define_insn "altivec_vpkpx"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VPKPX))]
  "TARGET_ALTIVEC"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpkpx %0,%1,%2\";
    else
      return \"vpkpx %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack signed to signed with saturation.
(define_insn "altivec_vpks<VI_char>ss"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_SIGN_SIGN_SAT))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpks<VI_char>ss %0,%1,%2\";
    else
      return \"vpks<VI_char>ss %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack signed to unsigned with saturation.
(define_insn "altivec_vpks<VI_char>us"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_SIGN_UNS_SAT))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpks<VI_char>us %0,%1,%2\";
    else
      return \"vpks<VI_char>us %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack unsigned to unsigned with saturation.
(define_insn "altivec_vpku<VI_char>us"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_UNS_UNS_SAT))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpku<VI_char>us %0,%1,%2\";
    else
      return \"vpku<VI_char>us %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Pack unsigned to unsigned modulo (truncating).
(define_insn "altivec_vpku<VI_char>um"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_UNS_UNS_MOD))]
  "<VI_unit>"
  "*
  {
    if (VECTOR_ELT_ORDER_BIG)
      return \"vpku<VI_char>um %0,%1,%2\";
    else
      return \"vpku<VI_char>um %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])

;; Direct pack-modulo: swaps on true byte order only, ignoring
;; -maltivec=be element-order override.
(define_insn "altivec_vpku<VI_char>um_direct"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
	(unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
			   UNSPEC_VPACK_UNS_UNS_MOD_DIRECT))]
  "<VI_unit>"
  "*
  {
    if (BYTES_BIG_ENDIAN)
      return \"vpku<VI_char>um %0,%1,%2\";
    else
      return \"vpku<VI_char>um %0,%2,%1\";
  }"
  [(set_attr "type" "vecperm")])
1540
;; Element-wise rotates and shifts.  These are lane-local (each element
;; shifted by the corresponding element of operand 2), so no endian
;; adjustment is needed.  The whole-register forms (vsl/vslo/vsr/vsro)
;; remain unspecs.
(define_insn "*altivec_vrl<VI_char>"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (rotate:VI2 (match_operand:VI2 1 "register_operand" "v")
		    (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vrl<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Shift left (whole vector, bit count from operand 2).
(define_insn "altivec_vsl"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSLV4SI))]
  "TARGET_ALTIVEC"
  "vsl %0,%1,%2"
  [(set_attr "type" "vecperm")])

;; Shift left by octet.
(define_insn "altivec_vslo"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSLO))]
  "TARGET_ALTIVEC"
  "vslo %0,%1,%2"
  [(set_attr "type" "vecperm")])

;; Element-wise logical shift left.
(define_insn "*altivec_vsl<VI_char>"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (ashift:VI2 (match_operand:VI2 1 "register_operand" "v")
		    (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vsl<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Element-wise logical shift right.
(define_insn "*altivec_vsr<VI_char>"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (lshiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
		      (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vsr<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Element-wise arithmetic shift right.
(define_insn "*altivec_vsra<VI_char>"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (ashiftrt:VI2 (match_operand:VI2 1 "register_operand" "v")
		      (match_operand:VI2 2 "register_operand" "v")))]
  "<VI_unit>"
  "vsra<VI_char> %0,%1,%2"
  [(set_attr "type" "vecsimple")])

;; Shift right (whole vector).
(define_insn "altivec_vsr"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSR))]
  "TARGET_ALTIVEC"
  "vsr %0,%1,%2"
  [(set_attr "type" "vecperm")])

;; Shift right by octet.
(define_insn "altivec_vsro"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSRO))]
  "TARGET_ALTIVEC"
  "vsro %0,%1,%2"
  [(set_attr "type" "vecperm")])
1608
;; Partial-sum instructions with saturation.  Each sets VSCR (SAT bit),
;; modeled here by the parallel set of hard reg 110.
(define_insn "altivec_vsum4ubs"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSUM4UBS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vsum4ubs %0,%1,%2"
  [(set_attr "type" "veccomplex")])

;; Signed variant, iterated over byte/halfword source modes (VIshort).
(define_insn "altivec_vsum4s<VI_char>s"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSUM4S))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vsum4s<VI_char>s %0,%1,%2"
  [(set_attr "type" "veccomplex")])
1628
;; FIXME: For the following two patterns, the scratch should only be
;; allocated for !VECTOR_ELT_ORDER_BIG, and the instructions should
;; be emitted separately.

;; Vector sum across partial (1/2) saturated.  For little-endian
;; element order a three-instruction sequence is emitted: rotate
;; operand 2 into position with vsldoi, do the sum into the scratch,
;; then rotate the result back.  Sets VSCR (SAT), modeled by hard
;; reg 110.
(define_insn "altivec_vsum2sws"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSUM2SWS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
   (clobber (match_scratch:V4SI 3 "=v"))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vsum2sws %0,%1,%2";
  else
    return "vsldoi %3,%2,%2,12\n\tvsum2sws %3,%1,%3\n\tvsldoi %0,%3,%3,4";
}
  [(set_attr "type" "veccomplex")
   (set (attr "length")
     (if_then_else
       (match_test "VECTOR_ELT_ORDER_BIG")
       (const_string "4")
       (const_string "12")))])

;; Vector sum across saturated.  Same endian scheme as vsum2sws, but
;; the LE sequence splats element 0 of operand 2 before summing.
(define_insn "altivec_vsumsws"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSUMSWS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))
   (clobber (match_scratch:V4SI 3 "=v"))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vsumsws %0,%1,%2";
  else
    return "vspltw %3,%2,0\n\tvsumsws %3,%1,%3\n\tvsldoi %0,%3,%3,12";
}
  [(set_attr "type" "veccomplex")
   (set (attr "length")
     (if_then_else
       (match_test "VECTOR_ELT_ORDER_BIG")
       (const_string "4")
       (const_string "12")))])

;; Raw vsumsws with no endian adjustment.
(define_insn "altivec_vsumsws_direct"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:V4SI 2 "register_operand" "v")]
		     UNSPEC_VSUMSWS_DIRECT))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vsumsws %0,%1,%2"
  [(set_attr "type" "veccomplex")])
1683
;; Splat byte.  For LE with -maltivec=be, the expander mirrors the
;; index (15 - n) so the RTL records the element actually selected;
;; the insn pattern then reverses the index again for any LE target
;; because the instruction itself numbers elements big-endian.
(define_expand "altivec_vspltb"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (15 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (QImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V16QImode, x);
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

(define_insn "*altivec_vspltb_internal"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (vec_duplicate:V16QI
	 (vec_select:QI (match_operand:V16QI 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
  "TARGET_ALTIVEC"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (15 - INTVAL (operands[2]));

  return "vspltb %0,%1,%2";
}
  [(set_attr "type" "vecperm")])

;; Raw vspltb; the immediate is passed through unchanged.
(define_insn "altivec_vspltb_direct"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
	               (match_operand:QI 2 "u5bit_cint_operand" "i")]
                      UNSPEC_VSPLT_DIRECT))]
  "TARGET_ALTIVEC"
  "vspltb %0,%1,%2"
  [(set_attr "type" "vecperm")])
1731
;; Splat halfword.  Same index-mirroring scheme as vspltb, with 8
;; elements (mirror is 7 - n).
(define_expand "altivec_vsplth"
  [(use (match_operand:V8HI 0 "register_operand" ""))
   (use (match_operand:V8HI 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (7 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (HImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V8HImode, x);
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

(define_insn "*altivec_vsplth_internal"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
	(vec_duplicate:V8HI
	 (vec_select:HI (match_operand:V8HI 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "")]))))]
  "TARGET_ALTIVEC"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (7 - INTVAL (operands[2]));

  return "vsplth %0,%1,%2";
}
  [(set_attr "type" "vecperm")])

;; Raw vsplth; the immediate is passed through unchanged.
(define_insn "altivec_vsplth_direct"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:QI 2 "u5bit_cint_operand" "i")]
                     UNSPEC_VSPLT_DIRECT))]
  "TARGET_ALTIVEC"
  "vsplth %0,%1,%2"
  [(set_attr "type" "vecperm")])
1779
;; Splat word element 2 of operand 1 across operand 0, expressed in RTL
;; as a vec_duplicate of a vec_select.
(define_expand "altivec_vspltw"
  [(use (match_operand:V4SI 0 "register_operand" ""))
   (use (match_operand:V4SI 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (SImode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V4SImode, x);
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

;; Match the vec_duplicate/vec_select form and emit vspltw, converting
;; the RTL index back to the instruction's BE-biased immediate on LE.
(define_insn "*altivec_vspltw_internal"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(vec_duplicate:V4SI
	 (vec_select:SI (match_operand:V4SI 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
  "TARGET_ALTIVEC"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  return "vspltw %0,%1,%2";
}
  [(set_attr "type" "vecperm")])

;; vspltw with the immediate used verbatim -- no endian adjustment.
(define_insn "altivec_vspltw_direct"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")
                      (match_operand:QI 2 "u5bit_cint_operand" "i")]
                     UNSPEC_VSPLT_DIRECT))]
  "TARGET_ALTIVEC"
  "vspltw %0,%1,%2"
  [(set_attr "type" "vecperm")])
1827
;; Splat float element 2 of operand 1 across operand 0.  There is no
;; separate float splat instruction; the internal pattern below emits
;; vspltw on the V4SF register.
(define_expand "altivec_vspltsf"
  [(use (match_operand:V4SF 0 "register_operand" ""))
   (use (match_operand:V4SF 1 "register_operand" ""))
   (use (match_operand:QI 2 "u5bit_cint_operand" ""))]
  "TARGET_ALTIVEC"
{
  rtvec v;
  rtx x;

  /* Special handling for LE with -maltivec=be.  We have to reflect
     the actual selected index for the splat in the RTL.  */
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  v = gen_rtvec (1, operands[2]);
  x = gen_rtx_VEC_SELECT (SFmode, operands[1], gen_rtx_PARALLEL (VOIDmode, v));
  x = gen_rtx_VEC_DUPLICATE (V4SFmode, x);
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], x));
  DONE;
})

;; Match the V4SF vec_duplicate/vec_select form; uses the integer
;; word-splat instruction since the operation is bit-pattern only.
(define_insn "*altivec_vspltsf_internal"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
	(vec_duplicate:V4SF
	 (vec_select:SF (match_operand:V4SF 1 "register_operand" "v")
			(parallel
			 [(match_operand:QI 2 "u5bit_cint_operand" "i")]))))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
{
  /* For true LE, this adjusts the selected index.  For LE with
     -maltivec=be, this reverses what was done in the define_expand
     because the instruction already has big-endian bias.  */
  if (!BYTES_BIG_ENDIAN)
    operands[2] = GEN_INT (3 - INTVAL (operands[2]));

  return "vspltw %0,%1,%2";
}
  [(set_attr "type" "vecperm")])
1866
;; Splat a 5-bit signed immediate into every element of the destination
;; (vspltisb/vspltish/vspltisw, chosen by the VI mode iterator).
(define_insn "altivec_vspltis<VI_char>"
  [(set (match_operand:VI 0 "register_operand" "=v")
	(vec_duplicate:VI
	 (match_operand:QI 1 "s5bit_cint_operand" "i")))]
  "TARGET_ALTIVEC"
  "vspltis<VI_char> %0,%1"
  [(set_attr "type" "vecperm")])
1874
(define_insn "*altivec_vrfiz"		; trunc (round toward zero, per fix:)
  [(set (match_operand:V4SF 0 "register_operand" "=v")
	(fix:V4SF (match_operand:V4SF 1 "register_operand" "v")))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vrfiz %0,%1"
  [(set_attr "type" "vecfloat")])
1881
;; vperm: permute bytes of operands 1 and 2 under control of the byte
;; selectors in operand 3.  On LE the expand rewrites the operation so
;; the BE-biased instruction produces the expected result.
(define_expand "altivec_vperm_<mode>"
  [(set (match_operand:VM 0 "register_operand" "=v")
	(unspec:VM [(match_operand:VM 1 "register_operand" "v")
		    (match_operand:VM 2 "register_operand" "v")
		    (match_operand:V16QI 3 "register_operand" "v")]
		   UNSPEC_VPERM))]
  "TARGET_ALTIVEC"
{
  if (!VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_vec_perm_le (operands);
      DONE;
    }
})

;; Emit the vperm instruction for the big-endian-ordered case.
(define_insn "*altivec_vperm_<mode>_internal"
  [(set (match_operand:VM 0 "register_operand" "=v")
	(unspec:VM [(match_operand:VM 1 "register_operand" "v")
		    (match_operand:VM 2 "register_operand" "v")
		    (match_operand:V16QI 3 "register_operand" "v")]
		   UNSPEC_VPERM))]
  "TARGET_ALTIVEC"
  "vperm %0,%1,%2,%3"
  [(set_attr "type" "vecperm")])
1906
;; Unsigned variant of the vperm patterns above; distinguished only by
;; the UNSPEC_VPERM_UNS tag, same instruction and LE handling.
(define_expand "altivec_vperm_<mode>_uns"
  [(set (match_operand:VM 0 "register_operand" "=v")
	(unspec:VM [(match_operand:VM 1 "register_operand" "v")
		    (match_operand:VM 2 "register_operand" "v")
		    (match_operand:V16QI 3 "register_operand" "v")]
		   UNSPEC_VPERM_UNS))]
  "TARGET_ALTIVEC"
{
  if (!VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_vec_perm_le (operands);
      DONE;
    }
})

(define_insn "*altivec_vperm_<mode>_uns_internal"
  [(set (match_operand:VM 0 "register_operand" "=v")
	(unspec:VM [(match_operand:VM 1 "register_operand" "v")
		    (match_operand:VM 2 "register_operand" "v")
		    (match_operand:V16QI 3 "register_operand" "v")]
		   UNSPEC_VPERM_UNS))]
  "TARGET_ALTIVEC"
  "vperm %0,%1,%2,%3"
  [(set_attr "type" "vecperm")])
1931
;; Generic vec_perm entry point used by the middle end.  NOTE(review):
;; this tests BYTES_BIG_ENDIAN where the altivec_vperm_* expands above
;; test VECTOR_ELT_ORDER_BIG -- confirm this difference is intentional.
(define_expand "vec_permv16qi"
  [(set (match_operand:V16QI 0 "register_operand" "")
	(unspec:V16QI [(match_operand:V16QI 1 "register_operand" "")
		       (match_operand:V16QI 2 "register_operand" "")
		       (match_operand:V16QI 3 "register_operand" "")]
		      UNSPEC_VPERM))]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN) {
    altivec_expand_vec_perm_le (operands);
    DONE;
  }
})
1945
;; Constant-selector permute: succeed only if the backend can implement
;; the given constant permutation; otherwise FAIL so the middle end
;; falls back to the generic vec_perm path.
(define_expand "vec_perm_constv16qi"
  [(match_operand:V16QI 0 "register_operand" "")
   (match_operand:V16QI 1 "register_operand" "")
   (match_operand:V16QI 2 "register_operand" "")
   (match_operand:V16QI 3 "" "")]
  "TARGET_ALTIVEC"
{
  if (altivec_expand_vec_perm_const (operands))
    DONE;
  else
    FAIL;
})
1958
;; Vector round-to-integral variants on V4SF.
(define_insn "altivec_vrfip"		; ceil
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_FRIP))]
  "TARGET_ALTIVEC"
  "vrfip %0,%1"
  [(set_attr "type" "vecfloat")])

(define_insn "altivec_vrfin"		; round to nearest
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_VRFIN))]
  "TARGET_ALTIVEC"
  "vrfin %0,%1"
  [(set_attr "type" "vecfloat")])

(define_insn "*altivec_vrfim"		; floor
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_FRIM))]
  "TARGET_ALTIVEC"
  "vrfim %0,%1"
  [(set_attr "type" "vecfloat")])
1982
;; Convert V4SI fixed-point to V4SF float; immediate operand 2 is the
;; scale factor (unsigned form).
(define_insn "altivec_vcfux"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
	              (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCFUX))]
  "TARGET_ALTIVEC"
  "vcfux %0,%1,%2"
  [(set_attr "type" "vecfloat")])

;; Signed form of the fixed-point to float conversion.
(define_insn "altivec_vcfsx"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SI 1 "register_operand" "v")
	              (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCFSX))]
  "TARGET_ALTIVEC"
  "vcfsx %0,%1,%2"
  [(set_attr "type" "vecfloat")])
2000
;; Saturating float to fixed-point conversions.  These also set the
;; VSCR (hard reg 110), modeled with UNSPEC_SET_VSCR.
(define_insn "altivec_vctuxs"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
                      (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCTUXS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vctuxs %0,%1,%2"
  [(set_attr "type" "vecfloat")])

(define_insn "altivec_vctsxs"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V4SF 1 "register_operand" "v")
                      (match_operand:QI 2 "immediate_operand" "i")]
		     UNSPEC_VCTSXS))
   (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))]
  "TARGET_ALTIVEC"
  "vctsxs %0,%1,%2"
  [(set_attr "type" "vecfloat")])
2020
;; V4SF estimate instructions: log2, 2**x, reciprocal square root and
;; reciprocal estimates.
(define_insn "altivec_vlogefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_VLOGEFP))]
  "TARGET_ALTIVEC"
  "vlogefp %0,%1"
  [(set_attr "type" "vecfloat")])

(define_insn "altivec_vexptefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_VEXPTEFP))]
  "TARGET_ALTIVEC"
  "vexptefp %0,%1"
  [(set_attr "type" "vecfloat")])

(define_insn "*altivec_vrsqrtefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_RSQRT))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vrsqrtefp %0,%1"
  [(set_attr "type" "vecfloat")])

(define_insn "altivec_vrefp"
  [(set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v")]
		     UNSPEC_FRES))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vrefp %0,%1"
  [(set_attr "type" "vecfloat")])
2052
;; copysign: build a vector of sign-bit masks (0x80000000 per word) and
;; use vector select to combine the magnitude of operand 1 with the
;; sign of operand 2.
(define_expand "altivec_copysign_v4sf3"
  [(use (match_operand:V4SF 0 "register_operand" ""))
   (use (match_operand:V4SF 1 "register_operand" ""))
   (use (match_operand:V4SF 2 "register_operand" ""))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "
{
  rtx mask = gen_reg_rtx (V4SImode);
  rtvec v = rtvec_alloc (4);
  /* 0x80000000: just the IEEE single-precision sign bit.  */
  unsigned HOST_WIDE_INT mask_val = ((unsigned HOST_WIDE_INT)1) << 31;

  RTVEC_ELT (v, 0) = GEN_INT (mask_val);
  RTVEC_ELT (v, 1) = GEN_INT (mask_val);
  RTVEC_ELT (v, 2) = GEN_INT (mask_val);
  RTVEC_ELT (v, 3) = GEN_INT (mask_val);

  emit_insn (gen_vec_initv4si (mask, gen_rtx_PARALLEL (V4SImode, v)));
  emit_insn (gen_vector_select_v4sf (operands[0], operands[1], operands[2],
				     gen_lowpart (V4SFmode, mask)));
  DONE;
}")
2074
;; vsldoi: shift the concatenation of operands 1 and 2 left by the
;; immediate number of octets in operand 3.
(define_insn "altivec_vsldoi_<mode>"
  [(set (match_operand:VM 0 "register_operand" "=v")
        (unspec:VM [(match_operand:VM 1 "register_operand" "v")
		    (match_operand:VM 2 "register_operand" "v")
		    (match_operand:QI 3 "immediate_operand" "i")]
		  UNSPEC_VLSDOI))]
  "TARGET_ALTIVEC"
  "vsldoi %0,%1,%2,%3"
  [(set_attr "type" "vecperm")])
2084
;; Sign-extending unpack of the "high" half in element order.  On LE
;; the instruction's notion of high/low is reversed, so the opposite
;; mnemonic is emitted there.
(define_insn "altivec_vupkhs<VU_char>"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_HI_SIGN))]
  "<VI_unit>"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupkhs<VU_char> %0,%1";
  else
    return "vupkls<VU_char> %0,%1";
}
  [(set_attr "type" "vecperm")])

;; Direct form: always emit vupkhs, no endian swap.
(define_insn "*altivec_vupkhs<VU_char>_direct"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
  "<VI_unit>"
  "vupkhs<VU_char> %0,%1"
  [(set_attr "type" "vecperm")])

;; Sign-extending unpack of the "low" half; mirror of the above.
(define_insn "altivec_vupkls<VU_char>"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_LO_SIGN))]
  "<VI_unit>"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupkls<VU_char> %0,%1";
  else
    return "vupkhs<VU_char> %0,%1";
}
  [(set_attr "type" "vecperm")])

;; Direct form: always emit vupkls, no endian swap.
(define_insn "*altivec_vupkls<VU_char>_direct"
  [(set (match_operand:VP 0 "register_operand" "=v")
	(unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		     UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
  "<VI_unit>"
  "vupkls<VU_char> %0,%1"
  [(set_attr "type" "vecperm")])
2126
;; Pixel unpack (V8HI -> V4SI); high/low mnemonics are swapped on LE
;; just as for the sign-extending unpacks above.
(define_insn "altivec_vupkhpx"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
		     UNSPEC_VUPKHPX))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupkhpx %0,%1";
  else
    return "vupklpx %0,%1";
}
  [(set_attr "type" "vecperm")])

(define_insn "altivec_vupklpx"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
	(unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
		     UNSPEC_VUPKLPX))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    return "vupklpx %0,%1";
  else
    return "vupkhpx %0,%1";
}
  [(set_attr "type" "vecperm")])
2152
;; Compare vectors producing a vector result and a predicate, setting CR6 to
;; indicate a combined status.  CR6 is hard reg 74; the "." forms of the
;; compare instructions record whether all/none of the elements compared true.
(define_insn "*altivec_vcmpequ<VI_char>_p"
  [(set (reg:CC 74)
	(unspec:CC [(eq:CC (match_operand:VI2 1 "register_operand" "v")
			   (match_operand:VI2 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:VI2 0 "register_operand" "=v")
	(eq:VI2 (match_dup 1)
		(match_dup 2)))]
  "<VI_unit>"
  "vcmpequ<VI_char>. %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Signed greater-than compare with predicate.
(define_insn "*altivec_vcmpgts<VI_char>_p"
  [(set (reg:CC 74)
	(unspec:CC [(gt:CC (match_operand:VI2 1 "register_operand" "v")
			   (match_operand:VI2 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:VI2 0 "register_operand" "=v")
	(gt:VI2 (match_dup 1)
		(match_dup 2)))]
  "<VI_unit>"
  "vcmpgts<VI_char>. %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Unsigned greater-than compare with predicate.
(define_insn "*altivec_vcmpgtu<VI_char>_p"
  [(set (reg:CC 74)
	(unspec:CC [(gtu:CC (match_operand:VI2 1 "register_operand" "v")
			    (match_operand:VI2 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:VI2 0 "register_operand" "=v")
	(gtu:VI2 (match_dup 1)
		 (match_dup 2)))]
  "<VI_unit>"
  "vcmpgtu<VI_char>. %0,%1,%2"
  [(set_attr "type" "veccmp")])
2190
;; Floating-point compares with predicate (CR6 = hard reg 74).
(define_insn "*altivec_vcmpeqfp_p"
  [(set (reg:CC 74)
	(unspec:CC [(eq:CC (match_operand:V4SF 1 "register_operand" "v")
			   (match_operand:V4SF 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:V4SF 0 "register_operand" "=v")
	(eq:V4SF (match_dup 1)
		 (match_dup 2)))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpeqfp. %0,%1,%2"
  [(set_attr "type" "veccmp")])

(define_insn "*altivec_vcmpgtfp_p"
  [(set (reg:CC 74)
	(unspec:CC [(gt:CC (match_operand:V4SF 1 "register_operand" "v")
			   (match_operand:V4SF 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:V4SF 0 "register_operand" "=v")
	(gt:V4SF (match_dup 1)
		 (match_dup 2)))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpgtfp. %0,%1,%2"
  [(set_attr "type" "veccmp")])

(define_insn "*altivec_vcmpgefp_p"
  [(set (reg:CC 74)
	(unspec:CC [(ge:CC (match_operand:V4SF 1 "register_operand" "v")
			   (match_operand:V4SF 2 "register_operand" "v"))]
		   UNSPEC_PREDICATE))
   (set (match_operand:V4SF 0 "register_operand" "=v")
	(ge:V4SF (match_dup 1)
		 (match_dup 2)))]
  "VECTOR_UNIT_ALTIVEC_P (V4SFmode)"
  "vcmpgefp. %0,%1,%2"
  [(set_attr "type" "veccmp")])

;; Bounds compare with predicate; result is an unspec since vcmpbfp
;; has no RTL comparison equivalent.
(define_insn "altivec_vcmpbfp_p"
  [(set (reg:CC 74)
	(unspec:CC [(match_operand:V4SF 1 "register_operand" "v")
		    (match_operand:V4SF 2 "register_operand" "v")]
		   UNSPEC_VCMPBFP))
   (set (match_operand:V4SF 0 "register_operand" "=v")
        (unspec:V4SF [(match_dup 1)
                      (match_dup 2)]
                      UNSPEC_VCMPBFP))]
  "VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)"
  "vcmpbfp. %0,%1,%2"
  [(set_attr "type" "veccmp")])
2239
;; Move to/from the VSCR (hard reg 110); volatile because VSCR affects
;; and reflects global vector state.
(define_insn "altivec_mtvscr"
  [(set (reg:SI 110)
	(unspec_volatile:SI
	 [(match_operand:V4SI 0 "register_operand" "v")] UNSPECV_MTVSCR))]
  "TARGET_ALTIVEC"
  "mtvscr %0"
  [(set_attr "type" "vecsimple")])

(define_insn "altivec_mfvscr"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
	(unspec_volatile:V8HI [(reg:SI 110)] UNSPECV_MFVSCR))]
  "TARGET_ALTIVEC"
  "mfvscr %0"
  [(set_attr "type" "vecsimple")])

;; Stop all data streams started by dst-family instructions.
(define_insn "altivec_dssall"
  [(unspec_volatile [(const_int 0)] UNSPECV_DSSALL)]
  "TARGET_ALTIVEC"
  "dssall"
  [(set_attr "type" "vecsimple")])
2260
;; Data-stream prefetch control.  dss stops stream %0; dst/dstt and
;; dstst/dststt start (transient, store-intent) streams with address
;; %0, control word %1, tag %2.  Operand 0 of dst* must be Pmode.
(define_insn "altivec_dss"
  [(unspec_volatile [(match_operand:QI 0 "immediate_operand" "i")]
		    UNSPECV_DSS)]
  "TARGET_ALTIVEC"
  "dss %0"
  [(set_attr "type" "vecsimple")])

(define_insn "altivec_dst"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DST)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dst %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "altivec_dstt"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTT)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dstt %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "altivec_dstst"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTST)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dstst %0,%1,%2"
  [(set_attr "type" "vecsimple")])

(define_insn "altivec_dststt"
  [(unspec [(match_operand 0 "register_operand" "b")
	    (match_operand:SI 1 "register_operand" "r")
	    (match_operand:QI 2 "immediate_operand" "i")] UNSPEC_DSTSTT)]
  "TARGET_ALTIVEC && GET_MODE (operands[0]) == Pmode"
  "dststt %0,%1,%2"
  [(set_attr "type" "vecsimple")])
2299
;; Load vector for shift left.  On LE (without -maltivec=be) the raw
;; lvsl mask is byte-reversed by permuting it with itself under an
;; identity selector, so later vperm use gets the expected mask.
(define_expand "altivec_lvsl"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "memory_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_lvsl_direct (operands[0], operands[1]));
  else
    {
      int i;
      rtx mask, perm[16], constv, vperm;
      mask = gen_reg_rtx (V16QImode);
      emit_insn (gen_altivec_lvsl_direct (mask, operands[1]));
      /* Identity selector 0..15; with LE vperm semantics this reverses
	 the mask bytes.  */
      for (i = 0; i < 16; ++i)
        perm[i] = GEN_INT (i);
      constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
      constv = force_reg (V16QImode, constv);
      vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
                              UNSPEC_VPERM);
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], vperm));
    }
  DONE;
})

;; Raw lvsl, no endian correction.
(define_insn "altivec_lvsl_direct"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
	(unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
		      UNSPEC_LVSL))]
  "TARGET_ALTIVEC"
  "lvsl %0,%y1"
  [(set_attr "type" "vecload")])
2331
;; Load vector for shift right; same LE mask-reversal scheme as
;; altivec_lvsl above.
(define_expand "altivec_lvsr"
  [(use (match_operand:V16QI 0 "register_operand" ""))
   (use (match_operand:V16QI 1 "memory_operand" ""))]
  "TARGET_ALTIVEC"
{
  if (VECTOR_ELT_ORDER_BIG)
    emit_insn (gen_altivec_lvsr_direct (operands[0], operands[1]));
  else
    {
      int i;
      rtx mask, perm[16], constv, vperm;
      mask = gen_reg_rtx (V16QImode);
      emit_insn (gen_altivec_lvsr_direct (mask, operands[1]));
      /* Identity selector 0..15; with LE vperm semantics this reverses
	 the mask bytes.  */
      for (i = 0; i < 16; ++i)
        perm[i] = GEN_INT (i);
      constv = gen_rtx_CONST_VECTOR (V16QImode, gen_rtvec_v (16, perm));
      constv = force_reg (V16QImode, constv);
      vperm = gen_rtx_UNSPEC (V16QImode, gen_rtvec (3, mask, mask, constv),
                              UNSPEC_VPERM);
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], vperm));
    }
  DONE;
})

;; Raw lvsr, no endian correction.
(define_insn "altivec_lvsr_direct"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
	(unspec:V16QI [(match_operand:V16QI 1 "memory_operand" "Z")]
		      UNSPEC_LVSR))]
  "TARGET_ALTIVEC"
  "lvsr %0,%y1"
  [(set_attr "type" "vecload")])
2363
;; Build the realignment mask for an unaligned vector load:
;; lvsr on the negated address yields the permute control vector used
;; by the vectorizer's realignment scheme.
(define_expand "build_vector_mask_for_load"
  [(set (match_operand:V16QI 0 "register_operand" "")
	(unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
  "TARGET_ALTIVEC"
  "
{
  rtx addr;
  rtx temp;

  gcc_assert (GET_CODE (operands[1]) == MEM);

  addr = XEXP (operands[1], 0);
  temp = gen_reg_rtx (GET_MODE (addr));
  /* temp = -addr, then take lvsr of a MEM at that address.  */
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_NEG (GET_MODE (addr), addr)));
  emit_insn (gen_altivec_lvsr (operands[0],
			       replace_equiv_address (operands[1], temp)))
;
  DONE;
}")
2383
;; Parallel some of the LVE* and STV*'s with unspecs, because some of them
;; have identical RTL but different instructions -- and gcc gets confused.

;; Load vector element (lvebx/lvehx/lvewx).  For LE with -maltivec=be
;; the expand rewrites the access for big-endian element order.
(define_expand "altivec_lve<VI_char>x"
  [(parallel
    [(set (match_operand:VI 0 "register_operand" "=v")
	  (match_operand:VI 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVE)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVE);
      DONE;
    }
})

(define_insn "*altivec_lve<VI_char>x_internal"
  [(parallel
    [(set (match_operand:VI 0 "register_operand" "=v")
	  (match_operand:VI 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVE)])]
  "TARGET_ALTIVEC"
  "lve<VI_char>x %0,%y1"
  [(set_attr "type" "vecload")])

;; Float element load uses the word-element instruction lvewx.
(define_insn "*altivec_lvesfx"
  [(parallel
    [(set (match_operand:V4SF 0 "register_operand" "=v")
	  (match_operand:V4SF 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVE)])]
  "TARGET_ALTIVEC"
  "lvewx %0,%y1"
  [(set_attr "type" "vecload")])
2418
;; lvxl ("load vector indexed LRU") expand; distinguished from lvx by
;; the UNSPEC tag only.  LE with -maltivec=be goes through the BE
;; expansion helper.
(define_expand "altivec_lvxl_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_SET_VSCR);
      DONE;
    }
})

;; NOTE(review): emits plain "lvx", not "lvxl" -- confirm intentional.
(define_insn "*altivec_lvxl_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
  "TARGET_ALTIVEC"
  "lvx %0,%y1"
  [(set_attr "type" "vecload")])

;; Full-vector load lvx.
(define_expand "altivec_lvx_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVX)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_lvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_LVX);
      DONE;
    }
})

(define_insn "*altivec_lvx_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "register_operand" "=v")
	  (match_operand:VM2 1 "memory_operand" "Z"))
     (unspec [(const_int 0)] UNSPEC_LVX)])]
  "TARGET_ALTIVEC"
  "lvx %0,%y1"
  [(set_attr "type" "vecload")])
2464
;; Full-vector stores stvx/stvxl; LE with -maltivec=be goes through the
;; BE store-expansion helper.
(define_expand "altivec_stvx_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVX)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVX);
      DONE;
    }
})

(define_insn "*altivec_stvx_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVX)])]
  "TARGET_ALTIVEC"
  "stvx %1,%y0"
  [(set_attr "type" "vecstore")])

(define_expand "altivec_stvxl_<mode>"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVXL)])]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_stvx_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVXL);
      DONE;
    }
})

(define_insn "*altivec_stvxl_<mode>_internal"
  [(parallel
    [(set (match_operand:VM2 0 "memory_operand" "=Z")
	  (match_operand:VM2 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVXL)])]
  "TARGET_ALTIVEC"
  "stvxl %1,%y0"
  [(set_attr "type" "vecstore")])
2510
;; Store vector element (stvebx/stvehx/stvewx); scalar-mode destination.
(define_expand "altivec_stve<VI_char>x"
  [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
	(unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
  "TARGET_ALTIVEC"
{
  if (!BYTES_BIG_ENDIAN && VECTOR_ELT_ORDER_BIG)
    {
      altivec_expand_stvex_be (operands[0], operands[1], <MODE>mode, UNSPEC_STVE);
      DONE;
    }
})

(define_insn "*altivec_stve<VI_char>x_internal"
  [(set (match_operand:<VI_scalar> 0 "memory_operand" "=Z")
	(unspec:<VI_scalar> [(match_operand:VI 1 "register_operand" "v")] UNSPEC_STVE))]
  "TARGET_ALTIVEC"
  "stve<VI_char>x %1,%y0"
  [(set_attr "type" "vecstore")])

;; Float element store uses the word-element instruction stvewx.
(define_insn "*altivec_stvesfx"
  [(set (match_operand:SF 0 "memory_operand" "=Z")
	(unspec:SF [(match_operand:V4SF 1 "register_operand" "v")] UNSPEC_STVE))]
  "TARGET_ALTIVEC"
  "stvewx %1,%y0"
  [(set_attr "type" "vecstore")])
2536
;; Integer abs as max(x, 0 - x).  Generate
;;    xxlxor/vxor SCRATCH0,SCRATCH0,SCRATCH0
;;    vsubu?m SCRATCH2,SCRATCH1,%1
;;    vmaxs? %0,%1,SCRATCH2"
(define_expand "abs<mode>2"
  [(set (match_dup 2) (match_dup 3))
   (set (match_dup 4)
        (minus:VI2 (match_dup 2)
		   (match_operand:VI2 1 "register_operand" "v")))
   (set (match_operand:VI2 0 "register_operand" "=v")
        (smax:VI2 (match_dup 1) (match_dup 4)))]
  "<VI_unit>"
{
  int i, n_elt = GET_MODE_NUNITS (<MODE>mode);
  rtvec v = rtvec_alloc (n_elt);

  /* Create an all 0 constant.  */
  for (i = 0; i < n_elt; ++i)
    RTVEC_ELT (v, i) = const0_rtx;

  operands[2] = gen_reg_rtx (<MODE>mode);
  operands[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
  operands[4] = gen_reg_rtx (<MODE>mode);
})
2561
;; Float abs by masking off the sign bits: the shift of all-ones by
;; all-ones (31 per element) yields 0x80000000 per word, which is
;; then cleared with andc.  Generate
;;    vspltisw SCRATCH1,-1
;;    vslw SCRATCH2,SCRATCH1,SCRATCH1
;;    vandc %0,%1,SCRATCH2
(define_expand "altivec_absv4sf2"
  [(set (match_dup 2)
	(vec_duplicate:V4SI (const_int -1)))
   (set (match_dup 3)
        (ashift:V4SI (match_dup 2) (match_dup 2)))
   (set (match_operand:V4SF 0 "register_operand" "=v")
        (and:V4SF (not:V4SF (subreg:V4SF (match_dup 3) 0))
                  (match_operand:V4SF 1 "register_operand" "v")))]
  "TARGET_ALTIVEC"
{
  operands[2] = gen_reg_rtx (V4SImode);
  operands[3] = gen_reg_rtx (V4SImode);
})
2579
;; Saturating integer abs: max(x, sat(0 - x)); the saturating subtract
;; sets VSCR SAT (reg 110), hence the parallel.  Generate
;;    vspltis? SCRATCH0,0
;;    vsubs?s SCRATCH2,SCRATCH1,%1
;;    vmaxs? %0,%1,SCRATCH2"
(define_expand "altivec_abss_<mode>"
  [(set (match_dup 2) (vec_duplicate:VI (const_int 0)))
   (parallel [(set (match_dup 3)
		   (unspec:VI [(match_dup 2)
			       (match_operand:VI 1 "register_operand" "v")]
			      UNSPEC_VSUBS))
              (set (reg:SI 110) (unspec:SI [(const_int 0)] UNSPEC_SET_VSCR))])
   (set (match_operand:VI 0 "register_operand" "=v")
        (smax:VI (match_dup 1) (match_dup 3)))]
  "TARGET_ALTIVEC"
{
  operands[2] = gen_reg_rtx (GET_MODE (operands[0]));
  operands[3] = gen_reg_rtx (GET_MODE (operands[0]));
})
2598
;; Signed horizontal add reduction: partial sums via vsum4s?s against a
;; zero vector, then a final vsumsws into the low word of the result
;; (viewed as V4SI via gen_lowpart).
(define_expand "reduc_splus_<mode>"
  [(set (match_operand:VIshort 0 "register_operand" "=v")
        (unspec:VIshort [(match_operand:VIshort 1 "register_operand" "v")]
			UNSPEC_REDUC_PLUS))]
  "TARGET_ALTIVEC"
{
  rtx vzero = gen_reg_rtx (V4SImode);
  rtx vtmp1 = gen_reg_rtx (V4SImode);
  rtx dest = gen_lowpart (V4SImode, operands[0]);

  emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
  emit_insn (gen_altivec_vsum4s<VI_char>s (vtmp1, operands[1], vzero));
  emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
  DONE;
})

;; Unsigned byte horizontal add reduction, same scheme with vsum4ubs.
(define_expand "reduc_uplus_v16qi"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
		      UNSPEC_REDUC_PLUS))]
  "TARGET_ALTIVEC"
{
  rtx vzero = gen_reg_rtx (V4SImode);
  rtx vtmp1 = gen_reg_rtx (V4SImode);
  rtx dest = gen_lowpart (V4SImode, operands[0]);

  emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));
  emit_insn (gen_altivec_vsum4ubs (vtmp1, operands[1], vzero));
  emit_insn (gen_altivec_vsumsws_direct (dest, vtmp1, vzero));
  DONE;
})
2630
;; Integer negate as 0 - x, using a splat-immediate zero.
(define_expand "neg<mode>2"
  [(use (match_operand:VI 0 "register_operand" ""))
   (use (match_operand:VI 1 "register_operand" ""))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero;

  vzero = gen_reg_rtx (GET_MODE (operands[0]));
  emit_insn (gen_altivec_vspltis<VI_char> (vzero, const0_rtx));
  emit_insn (gen_sub<mode>3 (operands[0], vzero, operands[1]));

  DONE;
}")
2645
;; Unsigned dot product of the short integer modes (V16QI/V8HI) accumulated
;; into V4SI operand 3; maps directly onto vmsumubm/vmsumuhm.
(define_expand "udot_prod<mode>"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
                   (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")
                                 (match_operand:VIshort 2 "register_operand" "v")]
                                UNSPEC_VMSUMU)))]
  "TARGET_ALTIVEC"
  "
{
  emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], operands[2], operands[3]));
  DONE;
}")
2658
;; Signed dot product of V8HI operands 1 and 2 accumulated into V4SI
;; operand 3; maps directly onto vmsumshm.
(define_expand "sdot_prodv8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 3 "register_operand" "v")
                   (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                                 (match_operand:V8HI 2 "register_operand" "v")]
                                UNSPEC_VMSUMSHM)))]
  "TARGET_ALTIVEC"
  "
{
  emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], operands[2], operands[3]));
  DONE;
}")
2671
;; Widening unsigned sum: implemented as a multiply-sum of operand 1
;; against a vector of all ones (vspltis<VI_char> 1), accumulating into
;; V4SI operand 2.
(define_expand "widen_usum<mode>3"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
                   (unspec:V4SI [(match_operand:VIshort 1 "register_operand" "v")]
                                UNSPEC_VMSUMU)))]
  "TARGET_ALTIVEC"
  "
{
  rtx vones = gen_reg_rtx (GET_MODE (operands[1]));

  emit_insn (gen_altivec_vspltis<VI_char> (vones, const1_rtx));
  emit_insn (gen_altivec_vmsumu<VI_char>m (operands[0], operands[1], vones, operands[2]));
  DONE;
}")
2686
;; Widening signed byte sum: multiply-sum of operand 1 against a V16QI
;; vector of ones (vmsummbm), accumulating into V4SI operand 2.
(define_expand "widen_ssumv16qi3"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
                   (unspec:V4SI [(match_operand:V16QI 1 "register_operand" "v")]
                                UNSPEC_VMSUMM)))]
  "TARGET_ALTIVEC"
  "
{
  rtx vones = gen_reg_rtx (V16QImode);

  emit_insn (gen_altivec_vspltisb (vones, const1_rtx));
  emit_insn (gen_altivec_vmsummbm (operands[0], operands[1], vones, operands[2]));
  DONE;
}")
2701
;; Widening signed halfword sum: multiply-sum of operand 1 against a V8HI
;; vector of ones (vmsumshm), accumulating into V4SI operand 2.
(define_expand "widen_ssumv8hi3"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (plus:V4SI (match_operand:V4SI 2 "register_operand" "v")
                   (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
                                UNSPEC_VMSUMSHM)))]
  "TARGET_ALTIVEC"
  "
{
  rtx vones = gen_reg_rtx (V8HImode);

  emit_insn (gen_altivec_vspltish (vones, const1_rtx));
  emit_insn (gen_altivec_vmsumshm (operands[0], operands[1], vones, operands[2]));
  DONE;
}")
2716
;; Signed unpack of the high half; the _DIRECT unspec selects the
;; endian-neutral vupkhs* form matched elsewhere in this file.
(define_expand "vec_unpacks_hi_<VP_small_lc>"
  [(set (match_operand:VP 0 "register_operand" "=v")
        (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		   UNSPEC_VUNPACK_HI_SIGN_DIRECT))]
  "<VI_unit>"
  "")
2723
;; Signed unpack of the low half; the _DIRECT unspec selects the
;; endian-neutral vupkls* form matched elsewhere in this file.
(define_expand "vec_unpacks_lo_<VP_small_lc>"
  [(set (match_operand:VP 0 "register_operand" "=v")
        (unspec:VP [(match_operand:<VP_small> 1 "register_operand" "v")]
		   UNSPEC_VUNPACK_LO_SIGN_DIRECT))]
  "<VI_unit>"
  "")
2730
;; vperm with mixed input/output modes (V8HI source, V4SI result), used by
;; the unsigned-unpack expanders below to interleave with a zero vector.
(define_insn "vperm_v8hiv4si"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                   (match_operand:V4SI 2 "register_operand" "v")
                   (match_operand:V16QI 3 "register_operand" "v")]
                  UNSPEC_VPERMSI))]
  "TARGET_ALTIVEC"
  "vperm %0,%1,%2,%3"
  [(set_attr "type" "vecperm")])
2740
;; vperm with mixed input/output modes (V16QI source, V8HI result), used by
;; the unsigned-unpack expanders below to interleave with a zero vector.
(define_insn "vperm_v16qiv8hi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                   (match_operand:V8HI 2 "register_operand" "v")
                   (match_operand:V16QI 3 "register_operand" "v")]
                  UNSPEC_VPERMHI))]
  "TARGET_ALTIVEC"
  "vperm %0,%1,%2,%3"
  [(set_attr "type" "vecperm")])
2750
2751
;; Unsigned unpack of the high 8 bytes to halfwords.  There is no vupkhub
;; instruction, so build a vperm mask that interleaves source bytes with
;; bytes of a zero vector (indices >= 16 select from the second, zero,
;; input).  The mask elements differ by endianness so that the same vperm
;; yields the correct element order either way.
(define_expand "vec_unpacku_hi_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
                     UNSPEC_VUPKHUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V8HImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltish (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 :  7);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ?  0 : 16);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ? 16 :  6);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  1 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 :  5);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ?  2 : 16);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ? 16 :  4);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ?  3 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 :  3);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ?  4 : 16);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 :  2);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ?  5 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  1);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ?  6 : 16);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 :  0);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ?  7 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
  DONE;
}")
2787
;; Unsigned unpack of the high 4 halfwords to words.  As for the byte
;; variant, there is no vupkhuh instruction; a vperm mask interleaves
;; halfword pairs of the source with halfword pairs of a zero vector
;; (byte indices 16/17 select from the zero input), with endian-specific
;; element positions.
(define_expand "vec_unpacku_hi_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
                     UNSPEC_VUPKHUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V4SImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 :  7);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ? 17 :  6);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ?  0 : 17);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  1 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 :  5);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ? 17 :  4);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ?  2 : 17);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ?  3 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 :  3);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ? 17 :  2);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ?  4 : 17);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ?  5 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  1);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 :  0);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ?  6 : 17);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ?  7 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
  DONE;
}")
2823
;; Unsigned unpack of the low 8 bytes to halfwords; same vperm-with-zero
;; technique as vec_unpacku_hi_v16qi but selecting source bytes 8..15.
(define_expand "vec_unpacku_lo_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")]
                     UNSPEC_VUPKLUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V8HImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltish (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ?  8 : 16);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ? 16 : 14);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  9 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ? 10 : 16);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ? 16 : 12);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ? 12 : 16);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 16 : 10);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  9);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 14 : 16);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 16 :  8);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v16qiv8hi (operands[0], operands[1], vzero, mask));
  DONE;
}")
2859
;; Unsigned unpack of the low 4 halfwords to words; same vperm-with-zero
;; technique as vec_unpacku_hi_v8hi but selecting source bytes 8..15.
(define_expand "vec_unpacku_lo_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")]
                     UNSPEC_VUPKLUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx vzero = gen_reg_rtx (V4SImode);
  rtx mask = gen_reg_rtx (V16QImode);
  rtvec v = rtvec_alloc (16);
  bool be = BYTES_BIG_ENDIAN;

  emit_insn (gen_altivec_vspltisw (vzero, const0_rtx));

  RTVEC_ELT (v,  0) = gen_rtx_CONST_INT (QImode, be ? 16 : 15);
  RTVEC_ELT (v,  1) = gen_rtx_CONST_INT (QImode, be ? 17 : 14);
  RTVEC_ELT (v,  2) = gen_rtx_CONST_INT (QImode, be ?  8 : 17);
  RTVEC_ELT (v,  3) = gen_rtx_CONST_INT (QImode, be ?  9 : 16);
  RTVEC_ELT (v,  4) = gen_rtx_CONST_INT (QImode, be ? 16 : 13);
  RTVEC_ELT (v,  5) = gen_rtx_CONST_INT (QImode, be ? 17 : 12);
  RTVEC_ELT (v,  6) = gen_rtx_CONST_INT (QImode, be ? 10 : 17);
  RTVEC_ELT (v,  7) = gen_rtx_CONST_INT (QImode, be ? 11 : 16);
  RTVEC_ELT (v,  8) = gen_rtx_CONST_INT (QImode, be ? 16 : 11);
  RTVEC_ELT (v,  9) = gen_rtx_CONST_INT (QImode, be ? 17 : 10);
  RTVEC_ELT (v, 10) = gen_rtx_CONST_INT (QImode, be ? 12 : 17);
  RTVEC_ELT (v, 11) = gen_rtx_CONST_INT (QImode, be ? 13 : 16);
  RTVEC_ELT (v, 12) = gen_rtx_CONST_INT (QImode, be ? 16 :  9);
  RTVEC_ELT (v, 13) = gen_rtx_CONST_INT (QImode, be ? 17 :  8);
  RTVEC_ELT (v, 14) = gen_rtx_CONST_INT (QImode, be ? 14 : 17);
  RTVEC_ELT (v, 15) = gen_rtx_CONST_INT (QImode, be ? 15 : 16);

  emit_insn (gen_vec_initv16qi (mask, gen_rtx_PARALLEL (V16QImode, v)));
  emit_insn (gen_vperm_v8hiv4si (operands[0], operands[1], vzero, mask));
  DONE;
}")
2895
;; Widening unsigned multiply, high half of a V16QI pair.  vmuleub/vmuloub
;; compute the even/odd products; vmrghh interleaves them.  On little
;; endian the even/odd instructions and merge operand order are swapped so
;; the element numbering comes out right.
(define_expand "vec_widen_umult_hi_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWHUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
    }
  DONE;
}")
2921
;; Widening unsigned multiply, low half of a V16QI pair; as the _hi
;; variant but merging with vmrglh.
(define_expand "vec_widen_umult_lo_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWLUB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuloub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmuloub (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleub (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
    }
  DONE;
}")
2947
;; Widening signed multiply, high half of a V16QI pair; as the unsigned
;; variant but using vmulesb/vmulosb.
(define_expand "vec_widen_smult_hi_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWHSB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghh_direct (operands[0], vo, ve));
    }
  DONE;
}")
2973
;; Widening signed multiply, low half of a V16QI pair; as the _hi variant
;; but merging with vmrglh.
(define_expand "vec_widen_smult_lo_v16qi"
  [(set (match_operand:V8HI 0 "register_operand" "=v")
        (unspec:V8HI [(match_operand:V16QI 1 "register_operand" "v")
                      (match_operand:V16QI 2 "register_operand" "v")]
                     UNSPEC_VMULWLSB))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V8HImode);
  rtx vo = gen_reg_rtx (V8HImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosb (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesb (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglh_direct (operands[0], vo, ve));
    }
  DONE;
}")
2999
;; Widening unsigned multiply, high half of a V8HI pair; vmuleuh/vmulouh
;; plus vmrghw, with the usual endian swap of even/odd and merge order.
(define_expand "vec_widen_umult_hi_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWHUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
    }
  DONE;
}")
3025
;; Widening unsigned multiply, low half of a V8HI pair; as the _hi variant
;; but merging with vmrglw.
(define_expand "vec_widen_umult_lo_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWLUH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmuleuh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulouh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulouh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmuleuh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
    }
  DONE;
}")
3051
;; Widening signed multiply, high half of a V8HI pair; vmulesh/vmulosh
;; plus vmrghw, with the usual endian swap of even/odd and merge order.
(define_expand "vec_widen_smult_hi_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWHSH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrghw_direct (operands[0], vo, ve));
    }
  DONE;
}")
3077
;; Widening signed multiply, low half of a V8HI pair; as the _hi variant
;; but merging with vmrglw.
(define_expand "vec_widen_smult_lo_v8hi"
  [(set (match_operand:V4SI 0 "register_operand" "=v")
        (unspec:V4SI [(match_operand:V8HI 1 "register_operand" "v")
                      (match_operand:V8HI 2 "register_operand" "v")]
                     UNSPEC_VMULWLSH))]
  "TARGET_ALTIVEC"
  "
{
  rtx ve = gen_reg_rtx (V4SImode);
  rtx vo = gen_reg_rtx (V4SImode);

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_altivec_vmulesh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulosh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], ve, vo));
    }
  else
    {
      emit_insn (gen_altivec_vmulosh (ve, operands[1], operands[2]));
      emit_insn (gen_altivec_vmulesh (vo, operands[1], operands[2]));
      emit_insn (gen_altivec_vmrglw_direct (operands[0], vo, ve));
    }
  DONE;
}")
3103
;; Pack (truncate) a pair of VP vectors into the next-narrower mode;
;; matched by the modulo vpku*um patterns elsewhere in this file.
(define_expand "vec_pack_trunc_<mode>"
  [(set (match_operand:<VP_small> 0 "register_operand" "=v")
        (unspec:<VP_small> [(match_operand:VP 1 "register_operand" "v")
			    (match_operand:VP 2 "register_operand" "v")]
                      UNSPEC_VPACK_UNS_UNS_MOD))]
  "<VI_unit>"
  "")
3111
;; V4SF negation without an FP negate instruction: build a sign-bit mask
;; (splat -1, then shift each word left by 31 via vslw shifting the
;; all-ones vector by itself) and XOR it into operand 1.
(define_expand "altivec_negv4sf2"
  [(use (match_operand:V4SF 0 "register_operand" ""))
   (use (match_operand:V4SF 1 "register_operand" ""))]
  "TARGET_ALTIVEC"
  "
{
  rtx neg0;

  /* Generate [-0.0, -0.0, -0.0, -0.0].  */
  neg0 = gen_reg_rtx (V4SImode);
  emit_insn (gen_altivec_vspltisw (neg0, constm1_rtx));
  emit_insn (gen_vashlv4si3 (neg0, neg0, neg0));

  /* XOR */
  emit_insn (gen_xorv4sf3 (operands[0],
			   gen_lowpart (V4SFmode, neg0), operands[1]));

  DONE;
}")
3131
;; Vector SIMD PEM v2.06c defines LVLX, LVLXL, LVRX, LVRXL,
;; STVLX, STVLXL, STVRX and STVRXL; they are available only on Cell.
;; Load vector left indexed (Cell-only; see comment above).
(define_insn "altivec_lvlx"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVLX))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvlx %0,%y1"
  [(set_attr "type" "vecload")])
3141
;; Load vector left indexed LRU (Cell-only).
(define_insn "altivec_lvlxl"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVLXL))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvlxl %0,%y1"
  [(set_attr "type" "vecload")])
3149
;; Load vector right indexed (Cell-only).
(define_insn "altivec_lvrx"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVRX))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvrx %0,%y1"
  [(set_attr "type" "vecload")])
3157
;; Load vector right indexed LRU (Cell-only).
(define_insn "altivec_lvrxl"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
        (unspec:V16QI [(match_operand:BLK 1 "memory_operand" "Z")]
		      UNSPEC_LVRXL))]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "lvrxl %0,%y1"
  [(set_attr "type" "vecload")])
3165
;; Store vector left indexed (Cell-only).  The unspec marks the partial /
;; unaligned store so it is not combined as a plain vector store.
(define_insn "altivec_stvlx"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVLX)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvlx %1,%y0"
  [(set_attr "type" "vecstore")])
3174
;; Store vector left indexed LRU (Cell-only).
(define_insn "altivec_stvlxl"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVLXL)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvlxl %1,%y0"
  [(set_attr "type" "vecstore")])
3183
;; Store vector right indexed (Cell-only).
(define_insn "altivec_stvrx"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVRX)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvrx %1,%y0"
  [(set_attr "type" "vecstore")])
3192
;; Store vector right indexed LRU (Cell-only).
(define_insn "altivec_stvrxl"
  [(parallel
    [(set (match_operand:V16QI 0 "memory_operand" "=Z")
	  (match_operand:V16QI 1 "register_operand" "v"))
     (unspec [(const_int 0)] UNSPEC_STVRXL)])]
  "TARGET_ALTIVEC && rs6000_cpu == PROCESSOR_CELL"
  "stvrxl %1,%y0"
  [(set_attr "type" "vecstore")])
3201
;; Signed halfword (high half) to float: signed unpack to V4SI, then
;; convert with vcfsx (scale factor 0).
(define_expand "vec_unpacks_float_hi_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKHS_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacks_hi_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
  DONE;
}")
3215
;; Signed halfword (low half) to float: signed unpack to V4SI, then
;; convert with vcfsx (scale factor 0).
(define_expand "vec_unpacks_float_lo_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKLS_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacks_lo_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfsx (operands[0], tmp, const0_rtx));
  DONE;
}")
3229
;; Unsigned halfword (high half) to float: unsigned unpack to V4SI, then
;; convert with vcfux (scale factor 0).
(define_expand "vec_unpacku_float_hi_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKHU_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacku_hi_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
  DONE;
}")
3243
;; Unsigned halfword (low half) to float: unsigned unpack to V4SI, then
;; convert with vcfux (scale factor 0).
(define_expand "vec_unpacku_float_lo_v8hi"
 [(set (match_operand:V4SF 0 "register_operand" "")
        (unspec:V4SF [(match_operand:V8HI 1 "register_operand" "")]
                     UNSPEC_VUPKLU_V4SF))]
  "TARGET_ALTIVEC"
  "
{
  rtx tmp = gen_reg_rtx (V4SImode);

  emit_insn (gen_vec_unpacku_lo_v8hi (tmp, operands[1]));
  emit_insn (gen_altivec_vcfux (operands[0], tmp, const0_rtx));
  DONE;
}")
3257
3258
3259;; Power8 vector instructions encoded as Altivec instructions
3260
3261;; Vector count leading zeros
;; Per-element count leading zeros (vclzb/h/w/d) for the VI2 modes.
(define_insn "*p8v_clz<mode>2"
  [(set (match_operand:VI2 0 "register_operand" "=v")
	(clz:VI2 (match_operand:VI2 1 "register_operand" "v")))]
  "TARGET_P8_VECTOR"
  "vclz<wd> %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3269
3270;; Vector population count
;; Per-element population count (vpopcntb/h/w/d) for the VI2 modes.
(define_insn "*p8v_popcount<mode>2"
  [(set (match_operand:VI2 0 "register_operand" "=v")
        (popcount:VI2 (match_operand:VI2 1 "register_operand" "v")))]
  "TARGET_P8_VECTOR"
  "vpopcnt<wd> %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3278
3279;; Vector Gather Bits by Bytes by Doubleword
;; Vector gather bits by bytes by doubleword (vgbbd); unspec because the
;; bit permutation has no RTL equivalent.
(define_insn "p8v_vgbbd"
  [(set (match_operand:V16QI 0 "register_operand" "=v")
	(unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")]
		      UNSPEC_VGBBD))]
  "TARGET_P8_VECTOR"
  "vgbbd %0,%1"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3288
3289
3290;; 128-bit binary integer arithmetic
3291;; We have a special container type (V1TImode) to allow operations using the
3292;; ISA 2.07 128-bit binary support to target the VMX/altivec registers without
3293;; having to worry about the register allocator deciding GPRs are better.
3294
;; 128-bit add modulo 2^128 in the VMX registers (see V1TI comment above).
(define_insn "altivec_vadduqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(plus:V1TI (match_operand:V1TI 1 "register_operand" "v")
		   (match_operand:V1TI 2 "register_operand" "v")))]
  "TARGET_VADDUQM"
  "vadduqm %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3303
;; 128-bit add, producing the carry out of the quadword addition.
(define_insn "altivec_vaddcuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")]
		     UNSPEC_VADDCUQ))]
  "TARGET_VADDUQM"
  "vaddcuq %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3313
;; 128-bit subtract modulo 2^128 in the VMX registers.
(define_insn "altivec_vsubuqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(minus:V1TI (match_operand:V1TI 1 "register_operand" "v")
		    (match_operand:V1TI 2 "register_operand" "v")))]
  "TARGET_VADDUQM"
  "vsubuqm %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3322
;; 128-bit subtract, producing the carry (borrow indicator) of the
;; quadword subtraction.
(define_insn "altivec_vsubcuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")]
		     UNSPEC_VSUBCUQ))]
  "TARGET_VADDUQM"
  "vsubcuq %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3332
;; 128-bit add extended: operands 1 + 2 plus the carry-in in operand 3.
(define_insn "altivec_vaddeuqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		     UNSPEC_VADDEUQM))]
  "TARGET_VADDUQM"
  "vaddeuqm %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3343
;; 128-bit add extended, producing the carry out of the add-with-carry.
(define_insn "altivec_vaddecuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		     UNSPEC_VADDECUQ))]
  "TARGET_VADDUQM"
  "vaddecuq %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3354
;; 128-bit subtract extended: operands 1 - 2 with carry-in in operand 3.
(define_insn "altivec_vsubeuqm"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		   UNSPEC_VSUBEUQM))]
  "TARGET_VADDUQM"
  "vsubeuqm %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3365
;; 128-bit subtract extended, producing the carry of the
;; subtract-with-carry.
(define_insn "altivec_vsubecuq"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:V1TI 3 "register_operand" "v")]
		     UNSPEC_VSUBECUQ))]
  "TARGET_VADDUQM"
  "vsubecuq %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3376
3377;; We use V2DI as the output type to simplify converting the permute
3378;; bits into an integer
;; Vector bit permute quadword (vbpermq); V2DI output per the comment
;; above so the permute bits convert easily to an integer.
(define_insn "altivec_vbpermq"
  [(set (match_operand:V2DI 0 "register_operand" "=v")
	(unspec:V2DI [(match_operand:V16QI 1 "register_operand" "v")
		      (match_operand:V16QI 2 "register_operand" "v")]
		     UNSPEC_VBPERMQ))]
  "TARGET_P8_VECTOR"
  "vbpermq %0,%1,%2"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3388
3389;; Decimal Integer operations
;; Iterator over the BCD add/subtract unspecs, with the matching "add"/
;; "sub" name fragment, plus the condition codes each bcd* expander
;; supports (unordered catches invalid BCD and overflow).
(define_int_iterator UNSPEC_BCD_ADD_SUB [UNSPEC_BCDADD UNSPEC_BCDSUB])

(define_int_attr bcd_add_sub [(UNSPEC_BCDADD "add")
			      (UNSPEC_BCDSUB "sub")])

(define_code_iterator BCD_TEST [eq lt gt unordered])
3396
;; BCD add/subtract producing only the value; the dot-form instruction
;; also writes CR6 (hard reg 74 here), modeled as a clobber.
(define_insn "bcd<bcd_add_sub>"
  [(set (match_operand:V1TI 0 "register_operand" "")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
		      (match_operand:V1TI 2 "register_operand" "")
		      (match_operand:QI 3 "const_0_to_1_operand" "")]
		     UNSPEC_BCD_ADD_SUB))
   (clobber (reg:CCFP 74))]
  "TARGET_P8_VECTOR"
  "bcd<bcd_add_sub>. %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3408
3409;; Use a floating point type (V2DFmode) for the compare to set CR6 so that we
3410;; can use the unordered test for BCD nans and add/subtracts that overflow.  An
3411;; UNORDERED test on an integer type (like V1TImode) is not defined.  The type
3412;; probably should be one that can go in the VMX (Altivec) registers, so we
3413;; can't use DDmode or DFmode.
;; BCD add/subtract used only for its CR6 result; the V1TI value is a
;; scratch.  V2DF/CCFP per the block comment above this pattern.
(define_insn "*bcd<bcd_add_sub>_test"
  [(set (reg:CCFP 74)
	(compare:CCFP
	 (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "v")
		       (match_operand:V1TI 2 "register_operand" "v")
		       (match_operand:QI 3 "const_0_to_1_operand" "i")]
		      UNSPEC_BCD_ADD_SUB)
	 (match_operand:V2DF 4 "zero_constant" "j")))
   (clobber (match_scratch:V1TI 0 "=v"))]
  "TARGET_P8_VECTOR"
  "bcd<bcd_add_sub>. %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3427
;; BCD add/subtract where both the value and the CR6 result are live;
;; this is the form the peephole2 below produces.
(define_insn "*bcd<bcd_add_sub>_test2"
  [(set (match_operand:V1TI 0 "register_operand" "=v")
	(unspec:V1TI [(match_operand:V1TI 1 "register_operand" "v")
		      (match_operand:V1TI 2 "register_operand" "v")
		      (match_operand:QI 3 "const_0_to_1_operand" "i")]
		     UNSPEC_BCD_ADD_SUB))
   (set (reg:CCFP 74)
	(compare:CCFP
	 (unspec:V2DF [(match_dup 1)
		       (match_dup 2)
		       (match_dup 3)]
		      UNSPEC_BCD_ADD_SUB)
	 (match_operand:V2DF 4 "zero_constant" "j")))]
  "TARGET_P8_VECTOR"
  "bcd<bcd_add_sub>. %0,%1,%2,%3"
  [(set_attr "length" "4")
   (set_attr "type" "vecsimple")])
3445
;; Expander for the BCD compare built-ins: run the bcd operation for its
;; CR6 setting, then materialize the requested BCD_TEST condition as an
;; SImode value.  Operand 4 (the zero comparand) is filled in below.
(define_expand "bcd<bcd_add_sub>_<code>"
  [(parallel [(set (reg:CCFP 74)
		   (compare:CCFP
		    (unspec:V2DF [(match_operand:V1TI 1 "register_operand" "")
				  (match_operand:V1TI 2 "register_operand" "")
				  (match_operand:QI 3 "const_0_to_1_operand" "")]
				 UNSPEC_BCD_ADD_SUB)
		    (match_dup 4)))
	      (clobber (match_scratch:V1TI 5 ""))])
   (set (match_operand:SI 0 "register_operand" "")
	(BCD_TEST:SI (reg:CCFP 74)
		     (const_int 0)))]
  "TARGET_P8_VECTOR"
{
  operands[4] = CONST0_RTX (V2DFmode);
})
3462
3463;; Peephole2 pattern to combine a bcdadd/bcdsub that calculates the value and
3464;; the bcdadd/bcdsub that tests the value.  The combiner won't work since
3465;; CR6 is a hard coded register.  Unfortunately, all of the Altivec predicate
3466;; support is hard coded to use the fixed register CR6 instead of creating
3467;; a register class for CR6.
3468
;; Merge a value-only bcd insn followed by a test-only bcd insn on the
;; same operands into the single *bcd<bcd_add_sub>_test2 form (see the
;; block comment above for why the combiner cannot do this).
(define_peephole2
  [(parallel [(set (match_operand:V1TI 0 "register_operand" "")
		   (unspec:V1TI [(match_operand:V1TI 1 "register_operand" "")
				 (match_operand:V1TI 2 "register_operand" "")
				 (match_operand:QI 3 "const_0_to_1_operand" "")]
				UNSPEC_BCD_ADD_SUB))
	      (clobber (reg:CCFP 74))])
   (parallel [(set (reg:CCFP 74)
		   (compare:CCFP
		    (unspec:V2DF [(match_dup 1)
				  (match_dup 2)
				  (match_dup 3)]
				 UNSPEC_BCD_ADD_SUB)
		    (match_operand:V2DF 4 "zero_constant" "")))
	      (clobber (match_operand:V1TI 5 "register_operand" ""))])]
  "TARGET_P8_VECTOR"
  [(parallel [(set (match_dup 0)
		   (unspec:V1TI [(match_dup 1)
				 (match_dup 2)
				 (match_dup 3)]
				UNSPEC_BCD_ADD_SUB))
	      (set (reg:CCFP 74)
		   (compare:CCFP
		    (unspec:V2DF [(match_dup 1)
				  (match_dup 2)
				  (match_dup 3)]
				 UNSPEC_BCD_ADD_SUB)
		    (match_dup 4)))])])