Lines Matching refs:vB

23     (set i64:$rD, (int_ppc_altivec_vextublx i64:$rA, v16i8:$vB))
24 (set i64:$rD, (int_ppc_altivec_vextuhlx i64:$rA, v8i16:$vB))
25 (set i64:$rD, (int_ppc_altivec_vextuwlx i64:$rA, v4i32:$vB))
28 (set i64:$rD, (int_ppc_altivec_vextubrx i64:$rA, v16i8:$vB))
29 (set i64:$rD, (int_ppc_altivec_vextuhrx i64:$rA, v8i16:$vB))
30 (set i64:$rD, (int_ppc_altivec_vextuwrx i64:$rA, v4i32:$vB))
41 (set i64:$rD, (int_ppc_altivec_vclzlsbb v16i8:$vB))
42 (set i64:$rD, (int_ppc_altivec_vctzlsbb v16i8:$vB))
46 (set v16i8:$vD, (cttz v16i8:$vB)) // vctzb
47 (set v8i16:$vD, (cttz v8i16:$vB)) // vctzh
48 (set v4i32:$vD, (cttz v4i32:$vB)) // vctzw
49 (set v2i64:$vD, (cttz v2i64:$vB)) // vctzd
53 (set v4i32:$vD, (sext v4i8:$vB))
61 (set v4i32:$vD, (sext v4i16:$vB))
69 (set v2i64:$vD, (sext v2i8:$vB))
77 (set v2i64:$vD, (sext v2i16:$vB))
85 (set v2i64:$vD, (sext v2i32:$vB))
99 (set v4i32:$rD, (int_ppc_altivec_vprtybw v4i32:$vB))
100 (set v2i64:$rD, (int_ppc_altivec_vprtybd v2i64:$vB))
101 (set v1i128:$rD, (int_ppc_altivec_vprtybq v1i128:$vB))
146 (set v1i128:$vD, (int_ppc_altivec_bcdcfno v1i128:$vB, i1:$PS))
147 (set v1i128:$vD, (int_ppc_altivec_bcdcfzo v1i128:$vB, i1:$PS))
148 (set v1i128:$vD, (int_ppc_altivec_bcdctno v1i128:$vB))
149 (set v1i128:$vD, (int_ppc_altivec_bcdctzo v1i128:$vB, i1:$PS))
150 (set v1i128:$vD, (int_ppc_altivec_bcdcfsqo v1i128:$vB, i1:$PS))
151 (set v1i128:$vD, (int_ppc_altivec_bcdctsqo v1i128:$vB))
155 (set v1i128:$vD, (int_ppc_altivec_bcdcpsgno v1i128:$vA, v1i128:$vB))
156 (set v1i128:$vD, (int_ppc_altivec_bcdsetsgno v1i128:$vB, i1:$PS))
160 (set v1i128:$vD, (int_ppc_altivec_bcdso v1i128:$vA, v1i128:$vB, i1:$PS))
161 (set v1i128:$vD, (int_ppc_altivec_bcduso v1i128:$vA, v1i128:$vB))
162 (set v1i128:$vD, (int_ppc_altivec_bcdsro v1i128:$vA, v1i128:$vB, i1:$PS))
168 (set v1i128:$vD, (int_ppc_altivec_bcdso v1i128:$vA, v1i128:$vB, i1:$PS))
169 (set v1i128:$vD, (int_ppc_altivec_bcduso v1i128:$vA, v1i128:$vB))
176 . (set f128:$vT, (fcopysign f128:$vB, f128:$vA))
180 . (set f128:$vT, (fabs f128:$vB)) // xsabsqp
181 (set f128:$vT, (fneg (fabs f128:$vB))) // xsnabsqp
182 (set f128:$vT, (fneg f128:$vB)) // xsnegqp
188 (set f128:$vT, (fadd f128:$vA, f128:$vB)) // xsaddqp
189 (set f128:$vT, (fmul f128:$vA, f128:$vB)) // xsmulqp
192 (set f128:$vT, (fdiv f128:$vA, f128:$vB)) // xsdivqp
193 (set f128:$vT, (fsub f128:$vA, f128:$vB)) // xssubqp
194 (set f128:$vT, (fsqrt f128:$vB)) // xssqrtqp
213 (set f128:$vT, (PPCfaddrto f128:$vA, f128:$vB)) // xsaddqpo
214 (set f128:$vT, (PPCfmulrto f128:$vA, f128:$vB)) // xsmulqpo
217 (set f128:$vT, (PPCfdivrto f128:$vA, f128:$vB)) // xsdivqpo
218 (set f128:$vT, (PPCfsubrto f128:$vA, f128:$vB)) // xssubqpo
219 (set f128:$vT, (PPCfsqrtrto f128:$vB)) // xssqrtqpo
226 [(set f128:$vT, (fma f128:$vA, f128:$vB, f128:$vTi))]>,
231 [(set f128:$vT, (fma f128:$vA, f128:$vB, (fneg f128:$vTi)))]>,
236 [(set f128:$vT, (fneg (fma f128:$vA, f128:$vB, f128:$vTi)))]>,
241 [(set f128:$vT, (fneg (fma f128:$vA, f128:$vB, (fneg f128:$vTi))))]>,
278 [(set f128:$vT, (PPCfmarto f128:$vA, f128:$vB, f128:$vTi))]>,
283 [(set f128:$vT, (PPCfmarto f128:$vA, f128:$vB, (fneg f128:$vTi)))]>,
288 [(set f128:$vT, (fneg (PPCfmarto f128:$vA, f128:$vB, f128:$vTi)))]>,
293 [(set f128:$vT, (fneg (PPCfmarto f128:$vA, f128:$vB, (fneg f128:$vTi))))]>,
369 (set f128:$vT, (int_ppc_vsx_xsrqpi f128:$vB))
370 (set f128:$vT, (int_ppc_vsx_xsrqpix f128:$vB))
374 (set f128:$vT, (int_ppc_vsx_xsrqpxp f128:$vB))
390 (set f128:$vT, (int_ppc_vsx_xsiexpqp f128:$vA, f64:$vB))
396 (set f128:$vT, (int_ppc_vsx_xsxexpqp f128:$vB)) // xsxexpqp
397 (set f128:$vT, (int_ppc_vsx_xsxsigqp f128:$vB)) // xsxsigqp