//===-- X86InstrFragmentsSIMD.td - x86 SIMD ISA ------------*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file provides pattern fragments useful for SIMD instructions.
//
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// MMX specific DAG Nodes.
//===----------------------------------------------------------------------===//

// Low word of MMX to GPR.
def MMX_X86movd2w : SDNode<"X86ISD::MMX_MOVD2W", SDTypeProfile<1, 1,
                            [SDTCisVT<0, i32>, SDTCisVT<1, x86mmx>]>>;

// GPR to low word of MMX.
def MMX_X86movw2d : SDNode<"X86ISD::MMX_MOVW2D", SDTypeProfile<1, 1,
                            [SDTCisVT<0, x86mmx>, SDTCisVT<1, i32>]>>;

//===----------------------------------------------------------------------===//
// MMX Pattern Fragments
//===----------------------------------------------------------------------===//

// A full 64-bit MMX register load.
def load_mmx : PatFrag<(ops node:$ptr), (x86mmx (load node:$ptr))>;

//===----------------------------------------------------------------------===//
// SSE specific DAG Nodes.
//===----------------------------------------------------------------------===//

// Vector FP compare: result and both vector sources share a type, plus an
// i8 immediate operand (operand 3).
def SDTX86VFCMP : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                       SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>,
                                       SDTCisVT<3, i8>]>;

// FP min/max binary nodes (non-commutative forms; see the *c variants below).
def X86fmin   : SDNode<"X86ISD::FMIN",  SDTFPBinOp>;
def X86fmax   : SDNode<"X86ISD::FMAX",  SDTFPBinOp>;
def X86fmins  : SDNode<"X86ISD::FMINS", SDTFPBinOp>;
def X86fmaxs  : SDNode<"X86ISD::FMAXS", SDTFPBinOp>;

// Commutative and Associative FMIN and FMAX.
def X86fminc  : SDNode<"X86ISD::FMINC", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fmaxc  : SDNode<"X86ISD::FMAXC", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;

// FP bitwise logic nodes.
def X86fand   : SDNode<"X86ISD::FAND", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86for    : SDNode<"X86ISD::FOR", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fxor   : SDNode<"X86ISD::FXOR", SDTFPBinOp,
                       [SDNPCommutative, SDNPAssociative]>;
def X86fandn  : SDNode<"X86ISD::FANDN", SDTFPBinOp>;
def X86frsqrt : SDNode<"X86ISD::FRSQRT", SDTFPUnaryOp>;
def X86frcp   : SDNode<"X86ISD::FRCP", SDTFPUnaryOp>;
// Horizontal add/sub nodes (FP and integer forms).
def X86fhadd  : SDNode<"X86ISD::FHADD", SDTFPBinOp>;
def X86fhsub  : SDNode<"X86ISD::FHSUB", SDTFPBinOp>;
def X86hadd   : SDNode<"X86ISD::HADD", SDTIntBinOp>;
def X86hsub   : SDNode<"X86ISD::HSUB", SDTIntBinOp>;
def X86comi   : SDNode<"X86ISD::COMI", SDTX86FCmp>;
def X86ucomi  : SDNode<"X86ISD::UCOMI", SDTX86FCmp>;

// Scalar FP setcc: same-typed FP sources plus an i8 immediate (operand 3).
def SDTX86Cmps : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisSameAs<0, 1>,
                                      SDTCisSameAs<1, 2>, SDTCisVT<3, i8>]>;
def X86cmps : SDNode<"X86ISD::FSETCC", SDTX86Cmps>;

def X86pshufb : SDNode<"X86ISD::PSHUFB",
                 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i8>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>]>>;
def X86psadbw : SDNode<"X86ISD::PSADBW",
                  SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                       SDTCVecEltisVT<1, i8>,
                                       SDTCisSameSizeAs<0,1>,
                                       SDTCisSameAs<1,2>]>, [SDNPCommutative]>;
def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW",
                  SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
                                       SDTCVecEltisVT<1, i8>,
                                       SDTCisSameSizeAs<0,1>,
                                       SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>>;
def X86andnp : SDNode<"X86ISD::ANDNP",
                 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>]>>;
def X86multishift : SDNode<"X86ISD::MULTISHIFT",
                    SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisSameAs<1,2>]>>;
// Element extract/insert nodes; the final i8 operand is the lane index.
def X86pextrb : SDNode<"X86ISD::PEXTRB",
                 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v16i8>,
                                      SDTCisVT<2, i8>]>>;
def X86pextrw : SDNode<"X86ISD::PEXTRW",
                 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v8i16>,
                                      SDTCisVT<2, i8>]>>;
def X86pinsrb : SDNode<"X86ISD::PINSRB",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
def X86pinsrw : SDNode<"X86ISD::PINSRW",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v8i16>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
def X86insertps : SDNode<"X86ISD::INSERTPS",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v4f32>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, v4f32>, SDTCisVT<3, i8>]>>;
def X86vzmovl : SDNode<"X86ISD::VZEXT_MOVL",
                 SDTypeProfile<1, 1, [SDTCisSameAs<0,1>]>>;

// Memory-touching nodes: carry a chain and a MachineMemOperand.
def X86vzld : SDNode<"X86ISD::VZEXT_LOAD", SDTLoad,
                     [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86vextractst : SDNode<"X86ISD::VEXTRACT_STORE", SDTStore,
                           [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86VBroadcastld : SDNode<"X86ISD::VBROADCAST_LOAD", SDTLoad,
                             [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;

// Integer vector truncation: result elements are strictly narrower than the
// source's. The M-prefixed profile adds a passthru (op 2) and i1 mask (op 3).
def SDTVtrunc  : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisInt<0>, SDTCisInt<1>,
                                      SDTCisOpSmallerThanOp<0, 1>]>;
def SDTVmtrunc : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisInt<0>, SDTCisInt<1>,
                                      SDTCisOpSmallerThanOp<0, 1>,
                                      SDTCisSameAs<0, 2>,
                                      SDTCVecEltisVT<3, i1>,
                                      SDTCisSameNumEltsAs<1, 3>]>;

def X86vtrunc    : SDNode<"X86ISD::VTRUNC",   SDTVtrunc>;
def X86vtruncs   : SDNode<"X86ISD::VTRUNCS",  SDTVtrunc>;
def X86vtruncus  : SDNode<"X86ISD::VTRUNCUS", SDTVtrunc>;
def X86vmtrunc   : SDNode<"X86ISD::VMTRUNC",   SDTVmtrunc>;
def X86vmtruncs  : SDNode<"X86ISD::VMTRUNCS",  SDTVmtrunc>;
def X86vmtruncus : SDNode<"X86ISD::VMTRUNCUS", SDTVmtrunc>;

def X86vfpext : SDNode<"X86ISD::VFPEXT",
                       SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f64>,
                                            SDTCVecEltisVT<1, f32>,
                                            SDTCisSameSizeAs<0, 1>]>>;

// Strict (chained) variant for constrained FP; X86any_vfpext matches either.
def X86strict_vfpext : SDNode<"X86ISD::STRICT_VFPEXT",
                              SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f64>,
                                                   SDTCVecEltisVT<1, f32>,
                                                   SDTCisSameSizeAs<0, 1>]>,
                              [SDNPHasChain]>;

def X86any_vfpext : PatFrags<(ops node:$src),
                             [(X86strict_vfpext node:$src),
                              (X86vfpext node:$src)]>;

def X86vfpround: SDNode<"X86ISD::VFPROUND",
                        SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>,
                                             SDTCVecEltisVT<1, f64>,
                                             SDTCisOpSmallerThanOp<0, 1>]>>;

def X86strict_vfpround: SDNode<"X86ISD::STRICT_VFPROUND",
                        SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>,
                                             SDTCVecEltisVT<1, f64>,
                                             SDTCisOpSmallerThanOp<0, 1>]>,
                        [SDNPHasChain]>;

def X86any_vfpround : PatFrags<(ops node:$src),
                               [(X86strict_vfpround node:$src),
                                (X86vfpround node:$src)]>;

def X86frounds : SDNode<"X86ISD::VFPROUNDS",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f32>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCVecEltisVT<2, f64>,
                                             SDTCisSameSizeAs<0, 2>]>>;

// _RND variants take a trailing i32 operand in addition to the base operands.
def X86froundsRnd: SDNode<"X86ISD::VFPROUNDS_RND",
                          SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>,
                                               SDTCisSameAs<0, 1>,
                                               SDTCVecEltisVT<2, f64>,
                                               SDTCisSameSizeAs<0, 2>,
                                               SDTCisVT<3, i32>]>>;

def X86fpexts : SDNode<"X86ISD::VFPEXTS",
                       SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f64>,
                                            SDTCisSameAs<0, 1>,
                                            SDTCVecEltisVT<2, f32>,
                                            SDTCisSameSizeAs<0, 2>]>>;
def X86fpextsSAE : SDNode<"X86ISD::VFPEXTS_SAE",
                          SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f64>,
                                               SDTCisSameAs<0, 1>,
                                               SDTCVecEltisVT<2, f32>,
                                               SDTCisSameSizeAs<0, 2>]>>;

def X86vmfpround: SDNode<"X86ISD::VMFPROUND",
                         SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>,
                                              SDTCVecEltisVT<1, f64>,
                                              SDTCisSameSizeAs<0, 1>,
                                              SDTCisSameAs<0, 2>,
                                              SDTCVecEltisVT<3, i1>,
                                              SDTCisSameNumEltsAs<1, 3>]>>;

// Vector shift by an i8 immediate (operand 2).
def X86vshiftimm : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisVT<2, i8>, SDTCisInt<0>]>;

def X86vshldq : SDNode<"X86ISD::VSHLDQ", X86vshiftimm>;
def X86vshrdq : SDNode<"X86ISD::VSRLDQ", X86vshiftimm>;
def X86pcmpeq : SDNode<"X86ISD::PCMPEQ", SDTIntBinOp, [SDNPCommutative]>;
def X86pcmpgt : SDNode<"X86ISD::PCMPGT", SDTIntBinOp>;

def X86cmpp : SDNode<"X86ISD::CMPP", SDTX86VFCMP>;
def X86strict_cmpp : SDNode<"X86ISD::STRICT_CMPP", SDTX86VFCMP, [SDNPHasChain]>;
def X86any_cmpp : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                           [(X86strict_cmpp node:$src1, node:$src2, node:$src3),
                            (X86cmpp node:$src1, node:$src2, node:$src3)]>;

// AVX-512 mask-register compare profiles: result is a vXi1 mask vector.
def X86CmpMaskCC :
      SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>]>;
def X86MaskCmpMaskCC :
      SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>,
                           SDTCisSameAs<4, 0>]>;
def X86CmpMaskCCScalar :
      SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisFP<1>, SDTCisSameAs<1, 2>,
                           SDTCisVT<3, i8>]>;

def X86cmpm     : SDNode<"X86ISD::CMPM",  X86CmpMaskCC>;
def X86cmpmm    : SDNode<"X86ISD::CMPMM", X86MaskCmpMaskCC>;
def X86strict_cmpm : SDNode<"X86ISD::STRICT_CMPM", X86CmpMaskCC,
                            [SDNPHasChain]>;
def X86any_cmpm : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                           [(X86strict_cmpm node:$src1, node:$src2, node:$src3),
                            (X86cmpm node:$src1, node:$src2, node:$src3)]>;
def X86cmpmmSAE : SDNode<"X86ISD::CMPMM_SAE", X86MaskCmpMaskCC>;
def X86cmpms    : SDNode<"X86ISD::FSETCCM", X86CmpMaskCCScalar>;
def X86cmpmsSAE : SDNode<"X86ISD::FSETCCM_SAE", X86CmpMaskCCScalar>;

def X86phminpos: SDNode<"X86ISD::PHMINPOS",
                 SDTypeProfile<1, 1, [SDTCisVT<0, v8i16>, SDTCisVT<1, v8i16>]>>;

// Vector shift where the amount comes from a (possibly differently typed)
// vector operand.
def X86vshiftuniform : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                            SDTCisVec<2>, SDTCisInt<0>,
                                            SDTCisInt<2>]>;

def X86vshl : SDNode<"X86ISD::VSHL", X86vshiftuniform>;
def X86vsrl : SDNode<"X86ISD::VSRL", X86vshiftuniform>;
def X86vsra : SDNode<"X86ISD::VSRA", X86vshiftuniform>;

// Per-element variable shift: amount vector has the same type as the data.
def X86vshiftvariable : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>, SDTCisInt<0>]>;

def X86vshlv : SDNode<"X86ISD::VSHLV", X86vshiftvariable>;
def X86vsrlv : SDNode<"X86ISD::VSRLV", X86vshiftvariable>;
def X86vsrav : SDNode<"X86ISD::VSRAV", X86vshiftvariable>;

def X86vshli : SDNode<"X86ISD::VSHLI", X86vshiftimm>;
def X86vsrli : SDNode<"X86ISD::VSRLI", X86vshiftimm>;
def X86vsrai : SDNode<"X86ISD::VSRAI", X86vshiftimm>;

// Mask-register shifts by i8 immediate.
def X86kshiftl : SDNode<"X86ISD::KSHIFTL",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisVT<2, i8>]>>;
def X86kshiftr : SDNode<"X86ISD::KSHIFTR",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisVT<2, i8>]>>;

def X86kadd : SDNode<"X86ISD::KADD", SDTIntBinOp, [SDNPCommutative]>;

def X86vrotli : SDNode<"X86ISD::VROTLI", X86vshiftimm>;
def X86vrotri : SDNode<"X86ISD::VROTRI", X86vshiftimm>;

// XOP per-element shifts.
def X86vpshl : SDNode<"X86ISD::VPSHL", X86vshiftvariable>;
def X86vpsha : SDNode<"X86ISD::VPSHA", X86vshiftvariable>;

def X86vpcom : SDNode<"X86ISD::VPCOM",
                      SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                           SDTCisSameAs<0,2>,
                                           SDTCisVT<3, i8>, SDTCisInt<0>]>>;
def X86vpcomu : SDNode<"X86ISD::VPCOMU",
                       SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisVT<3, i8>, SDTCisInt<0>]>>;
def X86vpermil2 : SDNode<"X86ISD::VPERMIL2",
                         SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                              SDTCisSameAs<0,2>,
                                              SDTCisFP<0>, SDTCisInt<3>,
                                              SDTCisSameNumEltsAs<0, 3>,
                                              SDTCisSameSizeAs<0,3>,
                                              SDTCisVT<4, i8>]>>;
def X86vpperm : SDNode<"X86ISD::VPPERM",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>, SDTCisSameAs<0, 3>]>>;

// PTEST-style compares: two same-typed vectors in, i32 (flags-like) out.
def SDTX86CmpPTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
                                          SDTCisVec<1>,
                                          SDTCisSameAs<2, 1>]>;

def X86mulhrs  : SDNode<"X86ISD::MULHRS", SDTIntBinOp, [SDNPCommutative]>;
def X86avg     : SDNode<"X86ISD::AVG" , SDTIntBinOp, [SDNPCommutative]>;
def X86ptest   : SDNode<"X86ISD::PTEST", SDTX86CmpPTest>;
def X86testp   : SDNode<"X86ISD::TESTP", SDTX86CmpPTest>;
def X86kortest : SDNode<"X86ISD::KORTEST", SDTX86CmpPTest>;
def X86ktest   : SDNode<"X86ISD::KTEST", SDTX86CmpPTest>;

def X86movmsk : SDNode<"X86ISD::MOVMSK",
                       SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVec<1>]>>;

// Scalar select controlled by a v1i1 mask (operand 1).
def X86selects : SDNode<"X86ISD::SELECTS",
                        SDTypeProfile<1, 3, [SDTCisVT<1, v1i1>,
                                             SDTCisSameAs<0, 2>,
                                             SDTCisSameAs<2, 3>]>>;

def X86pmuludq : SDNode<"X86ISD::PMULUDQ",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                             SDTCisSameAs<0,1>,
                                             SDTCisSameAs<1,2>]>,
                        [SDNPCommutative]>;
def X86pmuldq : SDNode<"X86ISD::PMULDQ",
                       SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<1,2>]>,
                       [SDNPCommutative]>;

// SSE4A EXTRQ/INSERTQ; the trailing i8 pair gives length/index immediates.
def X86extrqi : SDNode<"X86ISD::EXTRQI",
                  SDTypeProfile<1, 3, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                       SDTCisVT<2, i8>, SDTCisVT<3, i8>]>>;
def X86insertqi : SDNode<"X86ISD::INSERTQI",
                    SDTypeProfile<1, 4, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                         SDTCisSameAs<1,2>, SDTCisVT<3, i8>,
                                         SDTCisVT<4, i8>]>>;

// Specific shuffle nodes - At some point ISD::VECTOR_SHUFFLE will always get
// translated into one of the target nodes below during lowering.
// Note: this is a work in progress...
// Shuffle type profiles. "I"-suffixed profiles carry a trailing i8 immediate;
// "M"-suffixed carry an integer index vector matching the data vector's shape.
def SDTShuff1Op : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisSameAs<0,1>]>;
def SDTShuff2Op : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                       SDTCisSameAs<0,2>]>;
def SDTShuff2OpFP : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
                                         SDTCisSameAs<0,1>, SDTCisSameAs<0,2>]>;

def SDTShuff2OpM : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisFP<0>, SDTCisInt<2>,
                                        SDTCisSameNumEltsAs<0,2>,
                                        SDTCisSameSizeAs<0,2>]>;
def SDTShuff2OpI : SDTypeProfile<1, 2, [SDTCisVec<0>,
                                        SDTCisSameAs<0,1>, SDTCisVT<2, i8>]>;
def SDTShuff3OpI : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisSameAs<0,2>, SDTCisVT<3, i8>]>;
def SDTFPBinOpImm: SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                        SDTCisSameAs<0,1>,
                                        SDTCisSameAs<0,2>,
                                        SDTCisVT<3, i32>]>;
def SDTFPTernaryOpImm: SDTypeProfile<1, 4, [SDTCisFP<0>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisInt<3>,
                                            SDTCisSameSizeAs<0, 3>,
                                            SDTCisSameNumEltsAs<0, 3>,
                                            SDTCisVT<4, i32>]>;
def SDTFPUnaryOpImm: SDTypeProfile<1, 2, [SDTCisFP<0>,
                                          SDTCisSameAs<0,1>,
                                          SDTCisVT<2, i32>]>;

def SDTVBroadcast  : SDTypeProfile<1, 1, [SDTCisVec<0>]>;
def SDTVBroadcastm : SDTypeProfile<1, 1, [SDTCisVec<0>,
                                          SDTCisInt<0>, SDTCisInt<1>]>;

def SDTBlend : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                    SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;

def SDTTernlog : SDTypeProfile<1, 4, [SDTCisInt<0>, SDTCisVec<0>,
                                      SDTCisSameAs<0,1>, SDTCisSameAs<0,2>,
                                      SDTCisSameAs<0,3>, SDTCisVT<4, i8>]>;

def SDTFPBinOpRound : SDTypeProfile<1, 3, [ // fadd_round, fmul_round, etc.
  SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisFP<0>, SDTCisVT<3, i32>]>;

def SDTFPUnaryOpRound : SDTypeProfile<1, 2, [ // fsqrt_round, fgetexp_round, etc.
  SDTCisSameAs<0, 1>, SDTCisFP<0>, SDTCisVT<2, i32>]>;

def SDTFmaRound : SDTypeProfile<1, 4, [SDTCisSameAs<0,1>,
                                       SDTCisSameAs<1,2>, SDTCisSameAs<1,3>,
                                       SDTCisFP<0>, SDTCisVT<4, i32>]>;

def X86PAlignr : SDNode<"X86ISD::PALIGNR",
                        SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i8>,
                                             SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>,
                                             SDTCisVT<3, i8>]>>;
def X86VAlign : SDNode<"X86ISD::VALIGN", SDTShuff3OpI>;

def X86VShld : SDNode<"X86ISD::VSHLD", SDTShuff3OpI>;
def X86VShrd : SDNode<"X86ISD::VSHRD", SDTShuff3OpI>;
def X86VShldv : SDNode<"X86ISD::VSHLDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisSameAs<0,3>]>>;
def X86VShrdv : SDNode<"X86ISD::VSHRDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisSameAs<0,3>]>>;

def X86Conflict : SDNode<"X86ISD::CONFLICT", SDTIntUnaryOp>;

def X86PShufd  : SDNode<"X86ISD::PSHUFD", SDTShuff2OpI>;
def X86PShufhw : SDNode<"X86ISD::PSHUFHW", SDTShuff2OpI>;
def X86PShuflw : SDNode<"X86ISD::PSHUFLW", SDTShuff2OpI>;

def X86Shufp   : SDNode<"X86ISD::SHUFP", SDTShuff3OpI>;
def X86Shuf128 : SDNode<"X86ISD::SHUF128", SDTShuff3OpI>;

def X86Movddup  : SDNode<"X86ISD::MOVDDUP", SDTShuff1Op>;
def X86Movshdup : SDNode<"X86ISD::MOVSHDUP", SDTShuff1Op>;
def X86Movsldup : SDNode<"X86ISD::MOVSLDUP", SDTShuff1Op>;

def X86Movsd : SDNode<"X86ISD::MOVSD",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v2f64>,
                                           SDTCisVT<1, v2f64>,
                                           SDTCisVT<2, v2f64>]>>;
def X86Movss : SDNode<"X86ISD::MOVSS",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                           SDTCisVT<1, v4f32>,
                                           SDTCisVT<2, v4f32>]>>;

def X86Movlhps : SDNode<"X86ISD::MOVLHPS",
                        SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                             SDTCisVT<1, v4f32>,
                                             SDTCisVT<2, v4f32>]>>;
def X86Movhlps : SDNode<"X86ISD::MOVHLPS",
                        SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                             SDTCisVT<1, v4f32>,
                                             SDTCisVT<2, v4f32>]>>;

// PACKSS/PACKUS: two same-typed integer vectors narrowed into one result
// whose elements are strictly smaller than the sources'.
def SDTPack : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<0>,
                                   SDTCisVec<1>, SDTCisInt<1>,
                                   SDTCisSameSizeAs<0,1>,
                                   SDTCisSameAs<1,2>,
                                   SDTCisOpSmallerThanOp<0, 1>]>;
def X86Packss : SDNode<"X86ISD::PACKSS", SDTPack>;
def X86Packus : SDNode<"X86ISD::PACKUS", SDTPack>;

def X86Unpckl : SDNode<"X86ISD::UNPCKL", SDTShuff2Op>;
def X86Unpckh : SDNode<"X86ISD::UNPCKH", SDTShuff2Op>;

def X86vpmaddubsw : SDNode<"X86ISD::VPMADDUBSW",
                           SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                                SDTCVecEltisVT<1, i8>,
                                                SDTCisSameSizeAs<0,1>,
                                                SDTCisSameAs<1,2>]>>;
def X86vpmaddwd : SDNode<"X86ISD::VPMADDWD",
                         SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i32>,
                                              SDTCVecEltisVT<1, i16>,
                                              SDTCisSameSizeAs<0,1>,
                                              SDTCisSameAs<1,2>]>,
                         [SDNPCommutative]>;

def X86VPermilpv : SDNode<"X86ISD::VPERMILPV", SDTShuff2OpM>;
def X86VPermilpi : SDNode<"X86ISD::VPERMILPI", SDTShuff2OpI>;
def X86VPermv    : SDNode<"X86ISD::VPERMV",
                          SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<1>,
                                               SDTCisSameNumEltsAs<0,1>,
                                               SDTCisSameSizeAs<0,1>,
                                               SDTCisSameAs<0,2>]>>;
def X86VPermi    : SDNode<"X86ISD::VPERMI", SDTShuff2OpI>;
def X86VPermt2   : SDNode<"X86ISD::VPERMV3",
                          SDTypeProfile<1, 3, [SDTCisVec<0>,
                                               SDTCisSameAs<0,1>, SDTCisInt<2>,
                                               SDTCisVec<2>,
                                               SDTCisSameNumEltsAs<0, 2>,
                                               SDTCisSameSizeAs<0,2>,
                                               SDTCisSameAs<0,3>]>, []>;

def X86vpternlog : SDNode<"X86ISD::VPTERNLOG", SDTTernlog>;

def X86VPerm2x128 : SDNode<"X86ISD::VPERM2X128", SDTShuff3OpI>;

def X86VFixupimm     : SDNode<"X86ISD::VFIXUPIMM", SDTFPTernaryOpImm>;
def X86VFixupimmSAE  : SDNode<"X86ISD::VFIXUPIMM_SAE", SDTFPTernaryOpImm>;
def X86VFixupimms    : SDNode<"X86ISD::VFIXUPIMMS", SDTFPTernaryOpImm>;
def X86VFixupimmSAEs : SDNode<"X86ISD::VFIXUPIMMS_SAE", SDTFPTernaryOpImm>;
def X86VRange        : SDNode<"X86ISD::VRANGE", SDTFPBinOpImm>;
def X86VRangeSAE     : SDNode<"X86ISD::VRANGE_SAE", SDTFPBinOpImm>;
def X86VReduce       : SDNode<"X86ISD::VREDUCE", SDTFPUnaryOpImm>;
def X86VReduceSAE    : SDNode<"X86ISD::VREDUCE_SAE", SDTFPUnaryOpImm>;
def X86VRndScale     : SDNode<"X86ISD::VRNDSCALE", SDTFPUnaryOpImm>;
def X86strict_VRndScale : SDNode<"X86ISD::STRICT_VRNDSCALE", SDTFPUnaryOpImm,
                                 [SDNPHasChain]>;
def X86any_VRndScale : PatFrags<(ops node:$src1, node:$src2),
                                [(X86strict_VRndScale node:$src1, node:$src2),
                                 (X86VRndScale node:$src1, node:$src2)]>;

def X86VRndScaleSAE: SDNode<"X86ISD::VRNDSCALE_SAE", SDTFPUnaryOpImm>;
def X86VGetMant    : SDNode<"X86ISD::VGETMANT", SDTFPUnaryOpImm>;
def X86VGetMantSAE : SDNode<"X86ISD::VGETMANT_SAE", SDTFPUnaryOpImm>;
def X86Vfpclass  : SDNode<"X86ISD::VFPCLASS",
                    SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                         SDTCisFP<1>,
                                         SDTCisSameNumEltsAs<0,1>,
                                         SDTCisVT<2, i32>]>, []>;
def X86Vfpclasss : SDNode<"X86ISD::VFPCLASSS",
                    SDTypeProfile<1, 2, [SDTCisVT<0, v1i1>,
                                         SDTCisFP<1>, SDTCisVT<2, i32>]>,[]>;

def X86SubVBroadcast : SDNode<"X86ISD::SUBV_BROADCAST",
                              SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                                   SDTCisSubVecOfVec<1, 0>]>,
                              []>;

def X86VBroadcast  : SDNode<"X86ISD::VBROADCAST", SDTVBroadcast>;
def X86VBroadcastm : SDNode<"X86ISD::VBROADCASTM", SDTVBroadcastm>;

def X86Blendi : SDNode<"X86ISD::BLENDI", SDTBlend>;
def X86Blendv : SDNode<"X86ISD::BLENDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisInt<1>,
                                            SDTCisSameAs<0, 2>,
                                            SDTCisSameAs<2, 3>,
                                            SDTCisSameNumEltsAs<0, 1>,
                                            SDTCisSameSizeAs<0, 1>]>>;

def X86Addsub : SDNode<"X86ISD::ADDSUB", SDTFPBinOp>;

// Scalar ("s") and explicit-rounding ("Rnd"/"_RND") FP arithmetic nodes.
def X86faddRnd   : SDNode<"X86ISD::FADD_RND", SDTFPBinOpRound>;
def X86fadds     : SDNode<"X86ISD::FADDS", SDTFPBinOp>;
def X86faddRnds  : SDNode<"X86ISD::FADDS_RND", SDTFPBinOpRound>;
def X86fsubRnd   : SDNode<"X86ISD::FSUB_RND", SDTFPBinOpRound>;
def X86fsubs     : SDNode<"X86ISD::FSUBS", SDTFPBinOp>;
def X86fsubRnds  : SDNode<"X86ISD::FSUBS_RND", SDTFPBinOpRound>;
def X86fmulRnd   : SDNode<"X86ISD::FMUL_RND", SDTFPBinOpRound>;
def X86fmuls     : SDNode<"X86ISD::FMULS", SDTFPBinOp>;
def X86fmulRnds  : SDNode<"X86ISD::FMULS_RND", SDTFPBinOpRound>;
def X86fdivRnd   : SDNode<"X86ISD::FDIV_RND", SDTFPBinOpRound>;
def X86fdivs     : SDNode<"X86ISD::FDIVS", SDTFPBinOp>;
def X86fdivRnds  : SDNode<"X86ISD::FDIVS_RND", SDTFPBinOpRound>;
def X86fmaxSAE   : SDNode<"X86ISD::FMAX_SAE", SDTFPBinOp>;
def X86fmaxSAEs  : SDNode<"X86ISD::FMAXS_SAE", SDTFPBinOp>;
def X86fminSAE   : SDNode<"X86ISD::FMIN_SAE", SDTFPBinOp>;
def X86fminSAEs  : SDNode<"X86ISD::FMINS_SAE", SDTFPBinOp>;
def X86scalef    : SDNode<"X86ISD::SCALEF", SDTFPBinOp>;
def X86scalefRnd : SDNode<"X86ISD::SCALEF_RND", SDTFPBinOpRound>;
def X86scalefs   : SDNode<"X86ISD::SCALEFS", SDTFPBinOp>;
def X86scalefsRnd: SDNode<"X86ISD::SCALEFS_RND", SDTFPBinOpRound>;
def X86fsqrtRnd    : SDNode<"X86ISD::FSQRT_RND", SDTFPUnaryOpRound>;
def X86fsqrts      : SDNode<"X86ISD::FSQRTS", SDTFPBinOp>;
def X86fsqrtRnds   : SDNode<"X86ISD::FSQRTS_RND", SDTFPBinOpRound>;
def X86fgetexp     : SDNode<"X86ISD::FGETEXP", SDTFPUnaryOp>;
def X86fgetexpSAE  : SDNode<"X86ISD::FGETEXP_SAE", SDTFPUnaryOp>;
def X86fgetexps    : SDNode<"X86ISD::FGETEXPS", SDTFPBinOp>;
def X86fgetexpSAEs : SDNode<"X86ISD::FGETEXPS_SAE", SDTFPBinOp>;

// FMA family: each X86any_* PatFrags matches both the strict (chained,
// constrained-FP) node and the normal node.
def X86Fmadd : SDNode<"ISD::FMA", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fmadd : SDNode<"ISD::STRICT_FMA", SDTFPTernaryOp,
                             [SDNPCommutative, SDNPHasChain]>;
def X86any_Fmadd : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                            [(X86strict_Fmadd node:$src1, node:$src2, node:$src3),
                             (X86Fmadd node:$src1, node:$src2, node:$src3)]>;
def X86Fnmadd : SDNode<"X86ISD::FNMADD", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fnmadd : SDNode<"X86ISD::STRICT_FNMADD", SDTFPTernaryOp,
                              [SDNPCommutative, SDNPHasChain]>;
def X86any_Fnmadd : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                             [(X86strict_Fnmadd node:$src1, node:$src2, node:$src3),
                              (X86Fnmadd node:$src1, node:$src2, node:$src3)]>;
def X86Fmsub : SDNode<"X86ISD::FMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fmsub : SDNode<"X86ISD::STRICT_FMSUB", SDTFPTernaryOp,
                             [SDNPCommutative, SDNPHasChain]>;
def X86any_Fmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                            [(X86strict_Fmsub node:$src1, node:$src2, node:$src3),
                             (X86Fmsub node:$src1, node:$src2, node:$src3)]>;
def X86Fnmsub : SDNode<"X86ISD::FNMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fnmsub : SDNode<"X86ISD::STRICT_FNMSUB", SDTFPTernaryOp,
                              [SDNPCommutative, SDNPHasChain]>;
def X86any_Fnmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                             [(X86strict_Fnmsub node:$src1, node:$src2, node:$src3),
                              (X86Fnmsub node:$src1, node:$src2, node:$src3)]>;
def X86Fmaddsub : SDNode<"X86ISD::FMADDSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fmsubadd : SDNode<"X86ISD::FMSUBADD", SDTFPTernaryOp, [SDNPCommutative]>;

def X86FmaddRnd    : SDNode<"X86ISD::FMADD_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FnmaddRnd   : SDNode<"X86ISD::FNMADD_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmsubRnd    : SDNode<"X86ISD::FMSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FnmsubRnd   : SDNode<"X86ISD::FNMSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmaddsubRnd : SDNode<"X86ISD::FMADDSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmsubaddRnd : SDNode<"X86ISD::FMSUBADD_RND", SDTFmaRound, [SDNPCommutative]>;

def X86vp2intersect : SDNode<"X86ISD::VP2INTERSECT",
                             SDTypeProfile<1, 2, [SDTCisVT<0, untyped>,
                                                  SDTCisVec<1>,
                                                  SDTCisSameAs<1, 2>]>>;

// IFMA (52-bit integer fused multiply-add) type profile.
def SDTIFma : SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def x86vpmadd52l : SDNode<"X86ISD::VPMADD52L", SDTIFma, [SDNPCommutative]>;
def x86vpmadd52h : SDNode<"X86ISD::VPMADD52H", SDTIFma, [SDNPCommutative]>;

def X86rsqrt14 : SDNode<"X86ISD::RSQRT14", SDTFPUnaryOp>;
def X86rcp14   : SDNode<"X86ISD::RCP14",   SDTFPUnaryOp>;
// VNNI
def SDTVnni : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def X86Vpdpbusd  : SDNode<"X86ISD::VPDPBUSD",  SDTVnni>;
def X86Vpdpbusds : SDNode<"X86ISD::VPDPBUSDS", SDTVnni>;
def X86Vpdpwssd  : SDNode<"X86ISD::VPDPWSSD",  SDTVnni>;
def X86Vpdpwssds : SDNode<"X86ISD::VPDPWSSDS", SDTVnni>;

// AVX-512ER reciprocal/exp2 approximation nodes.
def X86rsqrt28   : SDNode<"X86ISD::RSQRT28",     SDTFPUnaryOp>;
def X86rsqrt28SAE: SDNode<"X86ISD::RSQRT28_SAE", SDTFPUnaryOp>;
def X86rcp28     : SDNode<"X86ISD::RCP28",       SDTFPUnaryOp>;
def X86rcp28SAE  : SDNode<"X86ISD::RCP28_SAE",   SDTFPUnaryOp>;
def X86exp2      : SDNode<"X86ISD::EXP2",        SDTFPUnaryOp>;
def X86exp2SAE   : SDNode<"X86ISD::EXP2_SAE",    SDTFPUnaryOp>;

def X86rsqrt14s    : SDNode<"X86ISD::RSQRT14S",     SDTFPBinOp>;
def X86rcp14s      : SDNode<"X86ISD::RCP14S",       SDTFPBinOp>;
def X86rsqrt28s    : SDNode<"X86ISD::RSQRT28S",     SDTFPBinOp>;
def X86rsqrt28SAEs : SDNode<"X86ISD::RSQRT28S_SAE", SDTFPBinOp>;
def X86rcp28s      : SDNode<"X86ISD::RCP28S",       SDTFPBinOp>;
def X86rcp28SAEs   : SDNode<"X86ISD::RCP28S_SAE",   SDTFPBinOp>;
def X86Ranges      : SDNode<"X86ISD::VRANGES",      SDTFPBinOpImm>;
def X86RndScales   : SDNode<"X86ISD::VRNDSCALES",   SDTFPBinOpImm>;
def X86Reduces     : SDNode<"X86ISD::VREDUCES",     SDTFPBinOpImm>;
def X86GetMants    : SDNode<"X86ISD::VGETMANTS",    SDTFPBinOpImm>;
def X86RangesSAE    : SDNode<"X86ISD::VRANGES_SAE",    SDTFPBinOpImm>;
def X86RndScalesSAE : SDNode<"X86ISD::VRNDSCALES_SAE", SDTFPBinOpImm>;
def X86ReducesSAE   : SDNode<"X86ISD::VREDUCES_SAE",   SDTFPBinOpImm>;
def X86GetMantsSAE  : SDNode<"X86ISD::VGETMANTS_SAE",  SDTFPBinOpImm>;

// Masked compress/expand: data, passthru (op 2), and a vXi1 mask (op 3).
def X86compress: SDNode<"X86ISD::COMPRESS", SDTypeProfile<1, 3,
                        [SDTCisSameAs<0, 1>, SDTCisVec<1>,
                         SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
                         SDTCisSameNumEltsAs<0, 3>]>, []>;
def X86expand  : SDNode<"X86ISD::EXPAND", SDTypeProfile<1, 3,
                        [SDTCisSameAs<0, 1>, SDTCisVec<1>,
                         SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
                         SDTCisSameNumEltsAs<0, 3>]>, []>;

// vpshufbitqmb
def X86Vpshufbitqmb : SDNode<"X86ISD::VPSHUFBITQMB",
                             SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                                  SDTCisSameAs<1,2>,
                                                  SDTCVecEltisVT<0,i1>,
                                                  SDTCisSameNumEltsAs<0,1>]>>;

// Conversion type profiles. "Rnd" adds a trailing i32 operand; "S" is scalar
// (result is a plain integer); "V" is vector int-to-fp.
def SDTintToFP: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
                                     SDTCisSameAs<0,1>, SDTCisInt<2>]>;
def SDTintToFPRound: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisFP<0>,
                                          SDTCisSameAs<0,1>, SDTCisInt<2>,
                                          SDTCisVT<3, i32>]>;

def SDTFloatToInt: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                        SDTCisInt<0>, SDTCisFP<1>]>;
def SDTFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisInt<0>, SDTCisFP<1>,
                                           SDTCisVT<2, i32>]>;
def SDTSFloatToInt: SDTypeProfile<1, 1, [SDTCisInt<0>, SDTCisFP<1>,
                                         SDTCisVec<1>]>;
def SDTSFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisInt<0>, SDTCisFP<1>,
                                            SDTCisVec<1>, SDTCisVT<2, i32>]>;

def SDTVintToFP: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisFP<0>, SDTCisInt<1>]>;
def SDTVintToFPRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisFP<0>, SDTCisInt<1>,
                                           SDTCisVT<2, i32>]>;

// Scalar
def X86SintToFp    : SDNode<"X86ISD::SCALAR_SINT_TO_FP", SDTintToFP>;
def X86SintToFpRnd : SDNode<"X86ISD::SCALAR_SINT_TO_FP_RND", SDTintToFPRound>;
def X86UintToFp    : SDNode<"X86ISD::SCALAR_UINT_TO_FP", SDTintToFP>;
def X86UintToFpRnd : SDNode<"X86ISD::SCALAR_UINT_TO_FP_RND", SDTintToFPRound>;

def X86cvtts2Int     : SDNode<"X86ISD::CVTTS2SI", SDTSFloatToInt>;
def X86cvtts2UInt    : SDNode<"X86ISD::CVTTS2UI", SDTSFloatToInt>;
def X86cvtts2IntSAE  : SDNode<"X86ISD::CVTTS2SI_SAE", SDTSFloatToInt>;
def X86cvtts2UIntSAE : SDNode<"X86ISD::CVTTS2UI_SAE", SDTSFloatToInt>;

def X86cvts2si     : SDNode<"X86ISD::CVTS2SI", SDTSFloatToInt>;
def X86cvts2usi    : SDNode<"X86ISD::CVTS2UI", SDTSFloatToInt>;
def X86cvts2siRnd  : SDNode<"X86ISD::CVTS2SI_RND", SDTSFloatToIntRnd>;
def X86cvts2usiRnd : SDNode<"X86ISD::CVTS2UI_RND", SDTSFloatToIntRnd>;

// Vector with rounding mode

// cvtt (truncating) fp-to-int nodes
def X86cvttp2siSAE : SDNode<"X86ISD::CVTTP2SI_SAE", SDTFloatToInt>;
def X86cvttp2uiSAE : SDNode<"X86ISD::CVTTP2UI_SAE", SDTFloatToInt>;

// int-to-fp nodes with a trailing i32 operand
def X86VSintToFpRnd : SDNode<"X86ISD::SINT_TO_FP_RND", SDTVintToFPRound>;
def X86VUintToFpRnd : SDNode<"X86ISD::UINT_TO_FP_RND", SDTVintToFPRound>;

// cvt fp-to-int nodes with a trailing i32 operand
def X86cvtp2IntRnd  : SDNode<"X86ISD::CVTP2SI_RND", SDTFloatToIntRnd>;
def X86cvtp2UIntRnd : SDNode<"X86ISD::CVTP2UI_RND", SDTFloatToIntRnd>;

// Vector without rounding mode

// cvtt (truncating) fp-to-int nodes
def X86cvttp2si : SDNode<"X86ISD::CVTTP2SI", SDTFloatToInt>;
def X86cvttp2ui : SDNode<"X86ISD::CVTTP2UI", SDTFloatToInt>;
def X86strict_cvttp2si : SDNode<"X86ISD::STRICT_CVTTP2SI", SDTFloatToInt,
                                [SDNPHasChain]>;
def X86strict_cvttp2ui : SDNode<"X86ISD::STRICT_CVTTP2UI", SDTFloatToInt,
                                [SDNPHasChain]>;
def X86any_cvttp2si : PatFrags<(ops node:$src),
                               [(X86strict_cvttp2si node:$src),
                                (X86cvttp2si node:$src)]>;
def X86any_cvttp2ui : PatFrags<(ops node:$src),
                               [(X86strict_cvttp2ui node:$src),
                                (X86cvttp2ui node:$src)]>;

def X86VSintToFP : SDNode<"X86ISD::CVTSI2P", SDTVintToFP>;
def X86VUintToFP : SDNode<"X86ISD::CVTUI2P", SDTVintToFP>;
def X86strict_VSintToFP : SDNode<"X86ISD::STRICT_CVTSI2P", SDTVintToFP,
                                 [SDNPHasChain]>;
def X86strict_VUintToFP : SDNode<"X86ISD::STRICT_CVTUI2P", SDTVintToFP,
                                 [SDNPHasChain]>;
def X86any_VSintToFP : PatFrags<(ops node:$src),
                                [(X86strict_VSintToFP node:$src),
                                 (X86VSintToFP node:$src)]>;
def X86any_VUintToFP : PatFrags<(ops node:$src),
                                [(X86strict_VUintToFP node:$src),
                                 (X86VUintToFP node:$src)]>;


// cvt fp-to-int nodes (original comment said "int-to-fp", but these use
// the fp-to-int profile SDTFloatToInt and the CVTP2SI/CVTP2UI opcodes).
def X86cvtp2Int  : SDNode<"X86ISD::CVTP2SI", SDTFloatToInt>;
def X86cvtp2UInt : SDNode<"X86ISD::CVTP2UI", SDTFloatToInt>;


// Masked versions of above
def SDTMVintToFP: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                       SDTCisFP<0>, SDTCisInt<1>,
                                       SDTCisSameSizeAs<0, 1>,
                                       SDTCisSameAs<0, 2>,
                                       SDTCVecEltisVT<3, i1>,
                                       SDTCisSameNumEltsAs<1, 3>]>;
def SDTMFloatToInt: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisInt<0>, SDTCisFP<1>,
                                         SDTCisSameSizeAs<0, 1>,
                                         SDTCisSameAs<0, 2>,
                                         SDTCVecEltisVT<3, i1>,
                                         SDTCisSameNumEltsAs<1, 3>]>;

def X86VMSintToFP : SDNode<"X86ISD::MCVTSI2P", SDTMVintToFP>;
def X86VMUintToFP : SDNode<"X86ISD::MCVTUI2P", SDTMVintToFP>;

def X86mcvtp2Int  : SDNode<"X86ISD::MCVTP2SI",  SDTMFloatToInt>;
def X86mcvtp2UInt : SDNode<"X86ISD::MCVTP2UI",  SDTMFloatToInt>;
def X86mcvttp2si  : SDNode<"X86ISD::MCVTTP2SI", SDTMFloatToInt>;
def X86mcvttp2ui  : SDNode<"X86ISD::MCVTTP2UI", SDTMFloatToInt>;

// Half <-> single precision conversions (F16C / AVX-512).
def SDTcvtph2ps : SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>,
                                       SDTCVecEltisVT<1, i16>]>;
def X86cvtph2ps : SDNode<"X86ISD::CVTPH2PS", SDTcvtph2ps>;
def X86strict_cvtph2ps : SDNode<"X86ISD::STRICT_CVTPH2PS", SDTcvtph2ps,
                                [SDNPHasChain]>;
def X86any_cvtph2ps : PatFrags<(ops node:$src),
                               [(X86strict_cvtph2ps node:$src),
                                (X86cvtph2ps node:$src)]>;

def X86cvtph2psSAE : SDNode<"X86ISD::CVTPH2PS_SAE", SDTcvtph2ps>;

def SDTcvtps2ph : SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                       SDTCVecEltisVT<1, f32>,
                                       SDTCisVT<2, i32>]>;
def X86cvtps2ph : SDNode<"X86ISD::CVTPS2PH", SDTcvtps2ph>;
def X86strict_cvtps2ph : SDNode<"X86ISD::STRICT_CVTPS2PH", SDTcvtps2ph,
                                [SDNPHasChain]>;
def X86any_cvtps2ph : PatFrags<(ops node:$src1, node:$src2),
                               [(X86strict_cvtps2ph node:$src1, node:$src2),
                                (X86cvtps2ph node:$src1, node:$src2)]>;

def X86mcvtps2ph : SDNode<"X86ISD::MCVTPS2PH",
                          SDTypeProfile<1, 4, [SDTCVecEltisVT<0, i16>,
                                               SDTCVecEltisVT<1, f32>,
                                               SDTCisVT<2, i32>,
                                               SDTCisSameAs<0, 3>,
                                               SDTCVecEltisVT<4, i1>,
                                               SDTCisSameNumEltsAs<1, 4>]> >;
def X86vfpextSAE : SDNode<"X86ISD::VFPEXT_SAE",
                          SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f64>,
                                               SDTCVecEltisVT<1, f32>,
                                               SDTCisOpSmallerThanOp<1, 0>]>>;
def X86vfproundRnd: SDNode<"X86ISD::VFPROUND_RND",
                           SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f32>,
                                                SDTCVecEltisVT<1, f64>,
                                                SDTCisOpSmallerThanOp<0, 1>,
                                                SDTCisVT<2, i32>]>>;

// cvt fp to bfloat16
def X86cvtne2ps2bf16 : SDNode<"X86ISD::CVTNE2PS2BF16",
                              SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                                   SDTCVecEltisVT<1, f32>,
                                                   SDTCisSameSizeAs<0,1>,
                                                   SDTCisSameAs<1,2>]>>;
def X86mcvtneps2bf16 : SDNode<"X86ISD::MCVTNEPS2BF16",
                              SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
                                                   SDTCVecEltisVT<1, f32>,
                                                   SDTCisSameAs<0, 2>,
                                                   SDTCVecEltisVT<3, i1>,
                                                   SDTCisSameNumEltsAs<1, 3>]>>;
def X86cvtneps2bf16 : SDNode<"X86ISD::CVTNEPS2BF16",
                             SDTypeProfile<1, 1, [SDTCVecEltisVT<0, i16>,
                                                  SDTCVecEltisVT<1, f32>]>>;
def X86dpbf16ps : SDNode<"X86ISD::DPBF16PS",
                         SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>,
                                              SDTCisSameAs<0,1>,
                                              SDTCVecEltisVT<2, i32>,
                                              SDTCisSameAs<2,3>]>>;

// galois field arithmetic
def X86GF2P8affineinvqb : SDNode<"X86ISD::GF2P8AFFINEINVQB", SDTBlend>;
def X86GF2P8affineqb    : SDNode<"X86ISD::GF2P8AFFINEQB", SDTBlend>;
def X86GF2P8mulb        : SDNode<"X86ISD::GF2P8MULB", SDTIntBinOp>;

def SDTX86MaskedStore: SDTypeProfile<0, 3, [ // masked store
  SDTCisVec<0>, SDTCisPtrTy<1>, SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>
]>;

//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//

// 128-bit load pattern fragments
def loadv4f32  : PatFrag<(ops node:$ptr), (v4f32 (load node:$ptr))>;
def loadv2f64  : PatFrag<(ops node:$ptr), (v2f64 (load node:$ptr))>;
def loadv2i64  : PatFrag<(ops node:$ptr), (v2i64 (load node:$ptr))>;
def loadv4i32  : PatFrag<(ops node:$ptr), (v4i32 (load node:$ptr))>;
def loadv8i16  : PatFrag<(ops node:$ptr), (v8i16 (load node:$ptr))>;
def loadv16i8  : PatFrag<(ops node:$ptr), (v16i8 (load node:$ptr))>;

// 256-bit load pattern fragments
def loadv8f32  : PatFrag<(ops node:$ptr), (v8f32 (load node:$ptr))>;
def loadv4f64  : PatFrag<(ops node:$ptr), (v4f64 (load node:$ptr))>;
def loadv4i64  : PatFrag<(ops node:$ptr), (v4i64 (load node:$ptr))>;
def loadv8i32  : PatFrag<(ops node:$ptr), (v8i32 (load node:$ptr))>;
def loadv16i16 : PatFrag<(ops node:$ptr), (v16i16 (load node:$ptr))>;
def loadv32i8  : PatFrag<(ops node:$ptr), (v32i8 (load node:$ptr))>;

// 512-bit load pattern fragments
def loadv16f32 : PatFrag<(ops node:$ptr), (v16f32 (load node:$ptr))>;
def loadv8f64  : PatFrag<(ops node:$ptr), (v8f64 (load node:$ptr))>;
def loadv8i64  : PatFrag<(ops node:$ptr), (v8i64 (load node:$ptr))>;
def loadv16i32 : PatFrag<(ops node:$ptr), (v16i32 (load node:$ptr))>;
def loadv32i16 : PatFrag<(ops node:$ptr), (v32i16 (load node:$ptr))>;
def loadv64i8  : PatFrag<(ops node:$ptr), (v64i8 (load node:$ptr))>;

// 128-/256-/512-bit extload pattern fragments
def extloadv2f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv4f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv8f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;

// Like 'store', but always requires vector size alignment.
def alignedstore : PatFrag<(ops node:$val, node:$ptr),
                           (store node:$val, node:$ptr), [{
  auto *St = cast<StoreSDNode>(N);
  return St->getAlignment() >= St->getMemoryVT().getStoreSize();
}]>;

// Like 'load', but always requires vector size alignment.
def alignedload : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  auto *Ld = cast<LoadSDNode>(N);
  return Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
}]>;

// 128-bit aligned load pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def alignedloadv4f32 : PatFrag<(ops node:$ptr),
                               (v4f32 (alignedload node:$ptr))>;
def alignedloadv2f64 : PatFrag<(ops node:$ptr),
                               (v2f64 (alignedload node:$ptr))>;
def alignedloadv2i64 : PatFrag<(ops node:$ptr),
                               (v2i64 (alignedload node:$ptr))>;
def alignedloadv4i32 : PatFrag<(ops node:$ptr),
                               (v4i32 (alignedload node:$ptr))>;
def alignedloadv8i16 : PatFrag<(ops node:$ptr),
                               (v8i16 (alignedload node:$ptr))>;
def alignedloadv16i8 : PatFrag<(ops node:$ptr),
                               (v16i8 (alignedload node:$ptr))>;

// 256-bit aligned load pattern fragments
// NOTE: all 256-bit integer vector loads are promoted to v4i64
def alignedloadv8f32  : PatFrag<(ops node:$ptr),
                                (v8f32 (alignedload node:$ptr))>;
def alignedloadv4f64  : PatFrag<(ops node:$ptr),
                                (v4f64 (alignedload node:$ptr))>;
def alignedloadv4i64  : PatFrag<(ops node:$ptr),
                                (v4i64 (alignedload node:$ptr))>;
def alignedloadv8i32  : PatFrag<(ops node:$ptr),
                                (v8i32 (alignedload node:$ptr))>;
def alignedloadv16i16 : PatFrag<(ops node:$ptr),
                                (v16i16 (alignedload node:$ptr))>;
def alignedloadv32i8  : PatFrag<(ops node:$ptr),
                                (v32i8 (alignedload node:$ptr))>;

// 512-bit aligned load pattern fragments
def alignedloadv16f32 : PatFrag<(ops node:$ptr),
                                (v16f32 (alignedload node:$ptr))>;
def alignedloadv8f64  : PatFrag<(ops node:$ptr),
                                (v8f64 (alignedload node:$ptr))>;
def alignedloadv8i64  : PatFrag<(ops node:$ptr),
                                (v8i64 (alignedload node:$ptr))>;
def alignedloadv16i32 : PatFrag<(ops node:$ptr),
                                (v16i32 (alignedload node:$ptr))>;
def alignedloadv32i16 : PatFrag<(ops node:$ptr),
                                (v32i16 (alignedload node:$ptr))>;
def alignedloadv64i8  : PatFrag<(ops node:$ptr),
                                (v64i8 (alignedload node:$ptr))>;

// Like 'load', but uses special alignment checks suitable for use in
// memory operands in most SSE instructions, which are required to
// be naturally aligned on some targets but not on others. If the subtarget
// allows unaligned accesses, match any load, though this may require
// setting a feature bit in the processor (on startup, for example).
// Opteron 10h and later implement such a feature.
def memop : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  auto *Ld = cast<LoadSDNode>(N);
  return Subtarget->hasSSEUnalignedMem() ||
         Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
}]>;

// 128-bit memop pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def memopv4f32 : PatFrag<(ops node:$ptr), (v4f32 (memop node:$ptr))>;
def memopv2f64 : PatFrag<(ops node:$ptr), (v2f64 (memop node:$ptr))>;
def memopv2i64 : PatFrag<(ops node:$ptr), (v2i64 (memop node:$ptr))>;
def memopv4i32 : PatFrag<(ops node:$ptr), (v4i32 (memop node:$ptr))>;
def memopv8i16 : PatFrag<(ops node:$ptr), (v8i16 (memop node:$ptr))>;
def memopv16i8 : PatFrag<(ops node:$ptr), (v16i8 (memop node:$ptr))>;

// 128-bit bitconvert pattern fragments
def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;

// 256-bit bitconvert pattern fragments
def bc_v32i8  : PatFrag<(ops node:$in), (v32i8 (bitconvert node:$in))>;
def bc_v16i16 : PatFrag<(ops node:$in), (v16i16 (bitconvert node:$in))>;
def bc_v8i32  : PatFrag<(ops node:$in), (v8i32 (bitconvert node:$in))>;
def bc_v4i64  : PatFrag<(ops node:$in), (v4i64 (bitconvert node:$in))>;
def bc_v8f32  : PatFrag<(ops node:$in), (v8f32 (bitconvert node:$in))>;
def bc_v4f64  : PatFrag<(ops node:$in), (v4f64 (bitconvert node:$in))>;

// 512-bit bitconvert pattern fragments
def bc_v64i8  : PatFrag<(ops node:$in), (v64i8 (bitconvert node:$in))>;
def bc_v32i16 : PatFrag<(ops node:$in), (v32i16 (bitconvert node:$in))>;
def bc_v16i32 : PatFrag<(ops node:$in), (v16i32 (bitconvert node:$in))>;
def bc_v8i64  : PatFrag<(ops node:$in), (v8i64 (bitconvert node:$in))>;
def bc_v8f64  : PatFrag<(ops node:$in), (v8f64 (bitconvert node:$in))>;
def bc_v16f32 : PatFrag<(ops node:$in), (v16f32 (bitconvert node:$in))>;

// Zero-extending scalar loads, distinguished by the number of bytes loaded.
def X86vzload32 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
}]>;

def X86vzload64 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

def X86vextractstore64 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86vextractst node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

// Broadcast loads, distinguished by the number of bytes loaded.
def X86VBroadcastld8 : PatFrag<(ops node:$src),
                               (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 1;
}]>;

def X86VBroadcastld16 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 2;
}]>;

def X86VBroadcastld32 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
}]>;

def X86VBroadcastld64 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

// Scalar SSE intrinsic fragments to match several different types of loads.
// Used by scalar SSE intrinsic instructions which have 128 bit types, but
// only load a single element.
// FIXME: We should add more canonicalizing in DAGCombine. Particularly removing
// the simple_load case.
def sse_load_f32 : PatFrags<(ops node:$ptr),
                            [(v4f32 (simple_load node:$ptr)),
                             (v4f32 (X86vzload32 node:$ptr)),
                             (v4f32 (scalar_to_vector (loadf32 node:$ptr)))]>;
def sse_load_f64 : PatFrags<(ops node:$ptr),
                            [(v2f64 (simple_load node:$ptr)),
                             (v2f64 (X86vzload64 node:$ptr)),
                             (v2f64 (scalar_to_vector (loadf64 node:$ptr)))]>;

def ssmem : X86MemOperand<"printdwordmem", X86Mem32AsmOperand>;
def sdmem : X86MemOperand<"printqwordmem", X86Mem64AsmOperand>;


// Positive-zero floating-point immediates of each width.
def fp32imm0 : PatLeaf<(f32 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def fp64imm0 : PatLeaf<(f64 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def fp128imm0 : PatLeaf<(f128 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

// EXTRACT_get_vextract128_imm xform function: convert extract_subvector index
// to VEXTRACTF128/VEXTRACTI128 imm.
def EXTRACT_get_vextract128_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 128, SDLoc(N));
}]>;

// INSERT_get_vinsert128_imm xform function: convert insert_subvector index to
// VINSERTF128/VINSERTI128 imm.
def INSERT_get_vinsert128_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 128, SDLoc(N));
}]>;

// EXTRACT_get_vextract256_imm xform function: convert extract_subvector index
// to VEXTRACTF64x4 imm.
def EXTRACT_get_vextract256_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 256, SDLoc(N));
}]>;

// INSERT_get_vinsert256_imm xform function: convert insert_subvector index to
// VINSERTF64x4 imm.
def INSERT_get_vinsert256_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 256, SDLoc(N));
}]>;

def vextract128_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  // Index 0 can be handled via extract_subreg.
  return !isNullConstant(N->getOperand(1));
}], EXTRACT_get_vextract128_imm>;

def vinsert128_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert128_imm>;

def vextract256_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  // Index 0 can be handled via extract_subreg.
  return !isNullConstant(N->getOperand(1));
}], EXTRACT_get_vextract256_imm>;

def vinsert256_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert256_imm>;

// Non-extending, non-expanding, unindexed masked load.
def masked_load : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                          (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
  return !cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
         cast<MaskedLoadSDNode>(N)->getExtensionType() == ISD::NON_EXTLOAD &&
         cast<MaskedLoadSDNode>(N)->isUnindexed();
}]>;

def masked_load_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                  (masked_load node:$src1, node:$src2, node:$src3), [{
  // Use the node type to determine the size the alignment needs to match.
  // We can't use memory VT because type widening changes the node VT, but
  // not the memory VT.
  auto *Ld = cast<MaskedLoadSDNode>(N);
  return Ld->getAlignment() >= Ld->getValueType(0).getStoreSize();
}]>;

def X86mExpandingLoad : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
  return cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
         cast<MaskedLoadSDNode>(N)->isUnindexed();
}]>;

// Masked store fragments.
// X86mstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
def masked_store : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return !cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
         !cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;

def masked_store_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                   (masked_store node:$src1, node:$src2, node:$src3), [{
  // Use the node type to determine the size the alignment needs to match.
  // We can't use memory VT because type widening changes the node VT, but
  // not the memory VT.
  auto *St = cast<MaskedStoreSDNode>(N);
  return St->getAlignment() >= St->getOperand(1).getValueType().getStoreSize();
}]>;

def X86mCompressingStore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                   (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;

// masked truncstore fragments
// X86mtruncstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
def X86mtruncstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                             (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;
def masked_truncstorevi8 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def masked_truncstorevi16 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def masked_truncstorevi32 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

// Saturating (signed/unsigned) truncating stores, plus masked variants.
def X86TruncSStore : SDNode<"X86ISD::VTRUNCSTORES", SDTStore,
                            [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86TruncUSStore : SDNode<"X86ISD::VTRUNCSTOREUS", SDTStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86MTruncSStore : SDNode<"X86ISD::VMTRUNCSTORES", SDTX86MaskedStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86MTruncUSStore : SDNode<"X86ISD::VMTRUNCSTOREUS", SDTX86MaskedStore,
                              [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def truncstore_s_vi8 : PatFrag<(ops node:$val, node:$ptr),
                               (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def truncstore_us_vi8 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def truncstore_s_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def truncstore_us_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def truncstore_s_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def truncstore_us_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def masked_truncstore_s_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                      (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def masked_truncstore_us_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def masked_truncstore_s_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def masked_truncstore_us_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def masked_truncstore_s_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def masked_truncstore_us_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;