;; GCC machine description for AVX512F instructions
;; Copyright (C) 2013-2018 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; Iterators for extending the substs below to as many modes as possible.
;; All vector modes (use it for the destination).
(define_mode_iterator SUBST_V
  [V64QI V32QI V16QI
   V32HI V16HI V8HI
   V16SI V8SI  V4SI
   V8DI  V4DI  V2DI
   V16SF V8SF  V4SF
   V8DF  V4DF  V2DF])

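;; Scalar integer modes (also used as mask modes, e.g. in "mask_scalar_merge").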
(define_mode_iterator SUBST_S
  [QI HI SI DI])

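;; All of the vector modes above plus the scalar modes QI HI SI DI SF DF.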
(define_mode_iterator SUBST_A
  [V64QI V32QI V16QI
   V32HI V16HI V8HI
   V16SI V8SI  V4SI
   V8DI  V4DI  V2DI
   V16SF V8SF  V4SF
   V8DF  V4DF  V2DF
   QI HI SI DI SF DF])

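;; The "mask" subst rewrites a plain (set dst src) pattern into a vec_merge
;; of src with a merge operand (tied to the destination or a zero vector,
;; constraint "0C") under a mask register ("Yk"), i.e. the EVEX merge- and
;; zero-masking forms.  The mask_operandN attributes append the mask (and,
;; for a zero merge operand, the "{z}" qualifier) to the assembler templates;
;; the operand numbers are shifted because the subst adds two operands.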
(define_subst_attr "mask_name" "mask" "" "_mask")
(define_subst_attr "mask_applied" "mask" "false" "true")
(define_subst_attr "mask_operand2" "mask" "" "%{%3%}%N2")
(define_subst_attr "mask_operand3" "mask" "" "%{%4%}%N3")
(define_subst_attr "mask_operand3_1" "mask" "" "%%{%%4%%}%%N3") ;; for sprintf
(define_subst_attr "mask_operand4" "mask" "" "%{%5%}%N4")
(define_subst_attr "mask_operand6" "mask" "" "%{%7%}%N6")
(define_subst_attr "mask_operand7" "mask" "" "%{%8%}%N7")
(define_subst_attr "mask_operand10" "mask" "" "%{%11%}%N10")
(define_subst_attr "mask_operand11" "mask" "" "%{%12%}%N11")
(define_subst_attr "mask_operand18" "mask" "" "%{%19%}%N18")
(define_subst_attr "mask_operand19" "mask" "" "%{%20%}%N19")
(define_subst_attr "mask_codefor" "mask" "*" "")
(define_subst_attr "mask_operand_arg34" "mask" "" ", operands[3], operands[4]")
(define_subst_attr "mask_mode512bit_condition" "mask" "1" "(<MODE_SIZE> == 64 || TARGET_AVX512VL)")
(define_subst_attr "mask_avx512vl_condition" "mask" "1" "TARGET_AVX512VL")
(define_subst_attr "mask_avx512bw_condition" "mask" "1" "TARGET_AVX512BW")
(define_subst_attr "mask_avx512dq_condition" "mask" "1" "TARGET_AVX512DQ")
(define_subst_attr "store_mask_constraint" "mask" "vm" "v")
(define_subst_attr "store_mask_predicate" "mask" "nonimmediate_operand" "register_operand")
(define_subst_attr "mask_prefix" "mask" "vex" "evex")
(define_subst_attr "mask_prefix2" "mask" "maybe_vex" "evex")
(define_subst_attr "mask_prefix3" "mask" "orig,vex" "evex,evex")
(define_subst_attr "mask_prefix4" "mask" "orig,orig,vex" "evex,evex,evex")
(define_subst_attr "mask_expand_op3" "mask" "3" "5")

(define_subst "mask"
  [(set (match_operand:SUBST_V 0)
        (match_operand:SUBST_V 1))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
        (vec_merge:SUBST_V
	  (match_dup 1)
	  (match_operand:SUBST_V 2 "vector_move_operand" "0C")
	  (match_operand:<avx512fmaskmode> 3 "register_operand" "Yk")))])

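;; The "mask_scalar_merge" subst handles instructions that produce a mask
;; (e.g. compares): the result is ANDed with a write-mask register ("Yk"),
;; and the *_operandN attributes print that mask operand in braces.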
(define_subst_attr "mask_scalar_merge_name" "mask_scalar_merge" "" "_mask")
(define_subst_attr "mask_scalar_merge_operand3" "mask_scalar_merge" "" "%{%3%}")
(define_subst_attr "mask_scalar_merge_operand4" "mask_scalar_merge" "" "%{%4%}")

(define_subst "mask_scalar_merge"
  [(set (match_operand:SUBST_S 0)
        (match_operand:SUBST_S 1))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
        (and:SUBST_S
	  (match_dup 1)
	  (match_operand:SUBST_S 3 "register_operand" "Yk")))])

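;; The "sd" subst generates the zero-masked ("_maskz_1") variant of a
;; pattern: like "mask", but the merge operand must be a zero vector
;; (const0_operand), so masked-off elements are cleared.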
(define_subst_attr "sd_maskz_name" "sd" "" "_maskz_1")
(define_subst_attr "sd_mask_op4" "sd" "" "%{%5%}%N4")
(define_subst_attr "sd_mask_op5" "sd" "" "%{%6%}%N5")
(define_subst_attr "sd_mask_codefor" "sd" "*" "")
(define_subst_attr "sd_mask_mode512bit_condition" "sd" "1" "(<MODE_SIZE> == 64 || TARGET_AVX512VL)")

(define_subst "sd"
 [(set (match_operand:SUBST_V 0)
       (match_operand:SUBST_V 1))]
 ""
 [(set (match_dup 0)
       (vec_merge:SUBST_V
	 (match_dup 1)
	 (match_operand:SUBST_V 2 "const0_operand" "C")
	 (match_operand:<avx512fmaskmode> 3 "register_operand" "Yk")))
])

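;; The "round" subst adds an embedded-rounding operand: the original RHS is
;; wrapped in UNSPEC_EMBEDDED_ROUNDING together with an SI immediate
;; (const_4_or_8_to_11_operand) selecting the rounding mode.  The round_*
;; attributes renumber operands and tighten constraints and conditions,
;; since embedded rounding requires register operands and, for vectors,
;; 512-bit modes.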
(define_subst_attr "round_name" "round" "" "_round")
(define_subst_attr "round_mask_operand2" "mask" "%R2" "%R4")
(define_subst_attr "round_mask_operand3" "mask" "%R3" "%R5")
(define_subst_attr "round_mask_operand4" "mask" "%R4" "%R6")
(define_subst_attr "round_sd_mask_operand4" "sd" "%R4" "%R6")
(define_subst_attr "round_op2" "round" "" "%R2")
(define_subst_attr "round_op3" "round" "" "%R3")
(define_subst_attr "round_op4" "round" "" "%R4")
(define_subst_attr "round_op5" "round" "" "%R5")
(define_subst_attr "round_op6" "round" "" "%R6")
(define_subst_attr "round_mask_op2" "round" "" "<round_mask_operand2>")
(define_subst_attr "round_mask_op3" "round" "" "<round_mask_operand3>")
(define_subst_attr "round_mask_op4" "round" "" "<round_mask_operand4>")
(define_subst_attr "round_sd_mask_op4" "round" "" "<round_sd_mask_operand4>")
(define_subst_attr "round_constraint" "round" "vm" "v")
(define_subst_attr "round_constraint2" "round" "m" "v")
(define_subst_attr "round_constraint3" "round" "rm" "r")
(define_subst_attr "round_nimm_predicate" "round" "vector_operand" "register_operand")
(define_subst_attr "round_nimm_scalar_predicate" "round" "nonimmediate_operand" "register_operand")
(define_subst_attr "round_prefix" "round" "vex" "evex")
(define_subst_attr "round_mode512bit_condition" "round" "1" "(<MODE>mode == V16SFmode
							      || <MODE>mode == V8DFmode
							      || <MODE>mode == V8DImode
							      || <MODE>mode == V16SImode)")
(define_subst_attr "round_modev8sf_condition" "round" "1" "(<MODE>mode == V8SFmode)")
(define_subst_attr "round_modev4sf_condition" "round" "1" "(<MODE>mode == V4SFmode)")
(define_subst_attr "round_codefor" "round" "*" "")
(define_subst_attr "round_opnum" "round" "5" "6")

(define_subst "round"
  [(set (match_operand:SUBST_A 0)
	(match_operand:SUBST_A 1))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
	(unspec:SUBST_A [(match_dup 1)
	  (match_operand:SI 2 "const_4_or_8_to_11_operand")]
	  UNSPEC_EMBEDDED_ROUNDING))
])

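;; The "round_saeonly" subst is the SAE-only ("suppress all exceptions")
;; counterpart of "round": the immediate must satisfy const48_operand
;; (4 or 8), so no rounding-mode override is encoded.  A second variant of
;; the subst below covers comparison patterns whose destination has CCFP
;; mode.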
(define_subst_attr "round_saeonly_name" "round_saeonly" "" "_round")
(define_subst_attr "round_saeonly_mask_operand2" "mask" "%r2" "%r4")
(define_subst_attr "round_saeonly_mask_operand3" "mask" "%r3" "%r5")
(define_subst_attr "round_saeonly_mask_operand4" "mask" "%r4" "%r6")
(define_subst_attr "round_saeonly_mask_scalar_merge_operand4" "mask_scalar_merge" "%r4" "%r5")
(define_subst_attr "round_saeonly_sd_mask_operand5" "sd" "%r5" "%r7")
(define_subst_attr "round_saeonly_op2" "round_saeonly" "" "%r2")
(define_subst_attr "round_saeonly_op3" "round_saeonly" "" "%r3")
(define_subst_attr "round_saeonly_op4" "round_saeonly" "" "%r4")
(define_subst_attr "round_saeonly_op5" "round_saeonly" "" "%r5")
(define_subst_attr "round_saeonly_op6" "round_saeonly" "" "%r6")
(define_subst_attr "round_saeonly_prefix" "round_saeonly" "vex" "evex")
(define_subst_attr "round_saeonly_mask_op2" "round_saeonly" "" "<round_saeonly_mask_operand2>")
(define_subst_attr "round_saeonly_mask_op3" "round_saeonly" "" "<round_saeonly_mask_operand3>")
(define_subst_attr "round_saeonly_mask_op4" "round_saeonly" "" "<round_saeonly_mask_operand4>")
(define_subst_attr "round_saeonly_mask_scalar_merge_op4" "round_saeonly" "" "<round_saeonly_mask_scalar_merge_operand4>")
(define_subst_attr "round_saeonly_sd_mask_op5" "round_saeonly" "" "<round_saeonly_sd_mask_operand5>")
(define_subst_attr "round_saeonly_mask_arg3" "round_saeonly" "" ", operands[<mask_expand_op3>]")
(define_subst_attr "round_saeonly_constraint" "round_saeonly" "vm" "v")
(define_subst_attr "round_saeonly_constraint2" "round_saeonly" "m" "v")
(define_subst_attr "round_saeonly_nimm_predicate" "round_saeonly" "vector_operand" "register_operand")
(define_subst_attr "round_saeonly_nimm_scalar_predicate" "round_saeonly" "nonimmediate_operand" "register_operand")
(define_subst_attr "round_saeonly_mode512bit_condition" "round_saeonly" "1" "(<MODE>mode == V16SFmode
									      || <MODE>mode == V8DFmode
									      || <MODE>mode == V8DImode
									      || <MODE>mode == V16SImode)")
(define_subst_attr "round_saeonly_modev8sf_condition" "round_saeonly" "1" "(<MODE>mode == V8SFmode)")

(define_subst "round_saeonly"
  [(set (match_operand:SUBST_A 0)
        (match_operand:SUBST_A 1))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
	(unspec:SUBST_A [(match_dup 1)
	  (match_operand:SI 2 "const48_operand")]
	  UNSPEC_EMBEDDED_ROUNDING))
])

(define_subst "round_saeonly"
  [(set (match_operand:CCFP 0)
        (match_operand:CCFP 1))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
	(unspec:CCFP [(match_dup 1)
	  (match_operand:SI 2 "const48_operand")]
	  UNSPEC_EMBEDDED_ROUNDING))
])

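;; The "round_expand" subst parameterizes expanders rather than insns: it
;; appends the embedded-rounding immediate as an extra operand, which the
;; expander passes on via <round_expand_operand> (", operands[5]").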
(define_subst_attr "round_expand_name" "round_expand" "" "_round")
(define_subst_attr "round_expand_nimm_predicate" "round_expand" "nonimmediate_operand" "register_operand")
(define_subst_attr "round_expand_operand" "round_expand" "" ", operands[5]")

(define_subst "round_expand"
 [(match_operand:SUBST_V 0)
  (match_operand:SUBST_V 1)
  (match_operand:SUBST_V 2)
  (match_operand:SUBST_V 3)
  (match_operand:SUBST_S 4)]
  "TARGET_AVX512F"
  [(match_dup 0)
   (match_dup 1)
   (match_dup 2)
   (match_dup 3)
   (match_dup 4)
   (unspec [(match_operand:SI 5 "const_4_or_8_to_11_operand")] UNSPEC_EMBEDDED_ROUNDING)])

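;; Same as "round_expand", but SAE-only: the extra immediate is operand 6
;; and must satisfy const48_operand.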
(define_subst_attr "round_saeonly_expand_name" "round_saeonly_expand" "" "_round")
(define_subst_attr "round_saeonly_expand_nimm_predicate" "round_saeonly_expand" "nonimmediate_operand" "register_operand")
(define_subst_attr "round_saeonly_expand_operand6" "round_saeonly_expand" "" ", operands[6]")

(define_subst "round_saeonly_expand"
 [(match_operand:SUBST_V 0)
  (match_operand:SUBST_V 1)
  (match_operand:SUBST_V 2)
  (match_operand:SUBST_A 3)
  (match_operand:SI 4)
  (match_operand:SUBST_S 5)]
  "TARGET_AVX512F"
  [(match_dup 0)
   (match_dup 1)
   (match_dup 2)
   (match_dup 3)
   (match_dup 4)
   (match_dup 5)
   (unspec [(match_operand:SI 6 "const48_operand")] UNSPEC_EMBEDDED_ROUNDING)])

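;; The "mask_expand4" subst adds masking to expanders taking three vector
;; operands plus an SI operand, appending the merge value and mask register
;; as operands 4 and 5 (passed through via <mask_expand4_args>).  Unlike the
;; other mask substs it is gated on TARGET_AVX512VL.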
(define_subst_attr "mask_expand4_name" "mask_expand4" "" "_mask")
(define_subst_attr "mask_expand4_args" "mask_expand4" "" ", operands[4], operands[5]")

(define_subst "mask_expand4"
  [(match_operand:SUBST_V 0)
   (match_operand:SUBST_V 1)
   (match_operand:SUBST_V 2)
   (match_operand:SI 3)]
   "TARGET_AVX512VL"
   [(match_dup 0)
    (match_dup 1)
    (match_dup 2)
    (match_dup 3)
    (match_operand:SUBST_V 4 "vector_move_operand")
    (match_operand:<avx512fmaskmode> 5 "register_operand")])

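;; The "mask_scalar" subst masks scalar patterns of the form
;; (vec_merge op1 op2 (const_int 1)): only the computed low element (op1)
;; is merged with operand 3 under mask operand 4; the upper elements are
;; still taken from op2 unmasked.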
(define_subst_attr "mask_scalar_name" "mask_scalar" "" "_mask")
(define_subst_attr "mask_scalar_operand3" "mask_scalar" "" "%{%4%}%N3")
(define_subst_attr "mask_scalar_operand4" "mask_scalar" "" "%{%5%}%N4")

(define_subst "mask_scalar"
  [(set (match_operand:SUBST_V 0)
	(vec_merge:SUBST_V
	  (match_operand:SUBST_V 1)
	  (match_operand:SUBST_V 2)
	  (const_int 1)))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
	(vec_merge:SUBST_V
	  (vec_merge:SUBST_V
	    (match_dup 1)
	    (match_operand:SUBST_V 3 "vector_move_operand" "0C")
	    (match_operand:<avx512fmaskmode> 4 "register_operand" "Yk"))
	  (match_dup 2)
	  (const_int 1)))])

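;; The "round_scalar" subst adds an embedded-rounding immediate
;; (const_4_or_8_to_11_operand) to such scalar vec_merge patterns by
;; wrapping them in UNSPEC_EMBEDDED_ROUNDING.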
(define_subst_attr "round_scalar_name" "round_scalar" "" "_round")
(define_subst_attr "round_scalar_mask_operand3" "mask_scalar" "%R3" "%R5")
(define_subst_attr "round_scalar_mask_op3" "round_scalar" "" "<round_scalar_mask_operand3>")
(define_subst_attr "round_scalar_constraint" "round_scalar" "vm" "v")
(define_subst_attr "round_scalar_prefix" "round_scalar" "vex" "evex")
(define_subst_attr "round_scalar_nimm_predicate" "round_scalar" "vector_operand" "register_operand")

(define_subst "round_scalar"
  [(set (match_operand:SUBST_V 0)
        (vec_merge:SUBST_V
          (match_operand:SUBST_V 1)
          (match_operand:SUBST_V 2)
          (const_int 1)))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
	(unspec:SUBST_V [
	     (vec_merge:SUBST_V
		(match_dup 1)
		(match_dup 2)
		(const_int 1))
	     (match_operand:SI 3 "const_4_or_8_to_11_operand")]
		UNSPEC_EMBEDDED_ROUNDING))])

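;; The "round_saeonly_scalar" subst is the SAE-only variant of
;; "round_scalar": the immediate must satisfy const48_operand.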
(define_subst_attr "round_saeonly_scalar_name" "round_saeonly_scalar" "" "_round")
(define_subst_attr "round_saeonly_scalar_mask_operand3" "mask_scalar" "%r3" "%r5")
(define_subst_attr "round_saeonly_scalar_mask_operand4" "mask_scalar" "%r4" "%r6")
(define_subst_attr "round_saeonly_scalar_mask_op3" "round_saeonly_scalar" "" "<round_saeonly_scalar_mask_operand3>")
(define_subst_attr "round_saeonly_scalar_mask_op4" "round_saeonly_scalar" "" "<round_saeonly_scalar_mask_operand4>")
(define_subst_attr "round_saeonly_scalar_constraint" "round_saeonly_scalar" "vm" "v")
(define_subst_attr "round_saeonly_scalar_prefix" "round_saeonly_scalar" "vex" "evex")
(define_subst_attr "round_saeonly_scalar_nimm_predicate" "round_saeonly_scalar" "vector_operand" "register_operand")

(define_subst "round_saeonly_scalar"
  [(set (match_operand:SUBST_V 0)
        (vec_merge:SUBST_V
          (match_operand:SUBST_V 1)
          (match_operand:SUBST_V 2)
          (const_int 1)))]
  "TARGET_AVX512F"
  [(set (match_dup 0)
	(unspec:SUBST_V [
	     (vec_merge:SUBST_V
		(match_dup 1)
		(match_dup 2)
		(const_int 1))
	     (match_operand:SI 3 "const48_operand")]
		UNSPEC_EMBEDDED_ROUNDING))])