; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i386-unknown -mattr=+sse2,-sse4.1 | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2,-sse4.1 | FileCheck %s --check-prefix=X64

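; Build a <8 x i16> vector from eight scalar i16 arguments and store it to %b.
; With only SSE2 available, the build is lowered to a tree of movd +
; punpcklwd/punpckldq/punpcklqdq shuffles, as checked below.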
define void @test(<8 x i16>* %b, i16 %a0, i16 %a1, i16 %a2, i16 %a3, i16 %a4, i16 %a5, i16 %a6, i16 %a7) nounwind {
; X86-LABEL: test:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-NEXT:    movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; X86-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; X86-NEXT:    movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-NEXT:    movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; X86-NEXT:    punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
; X86-NEXT:    punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X86-NEXT:    movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-NEXT:    movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; X86-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; X86-NEXT:    movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-NEXT:    movd {{.*#+}} xmm3 = mem[0],zero,zero,zero
; X86-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
; X86-NEXT:    punpckldq {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
; X86-NEXT:    punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm2[0]
; X86-NEXT:    movdqa %xmm3, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: test:
; X64:       # %bb.0:
; X64-NEXT:    movd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X64-NEXT:    movd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; X64-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
; X64-NEXT:    movd %r9d, %xmm0
; X64-NEXT:    movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; X64-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3]
; X64-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-NEXT:    movd %r8d, %xmm1
; X64-NEXT:    movd %ecx, %xmm2
; X64-NEXT:    punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
; X64-NEXT:    movd %edx, %xmm1
; X64-NEXT:    movd %esi, %xmm3
; X64-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
; X64-NEXT:    punpckldq {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
; X64-NEXT:    punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm0[0]
; X64-NEXT:    movdqa %xmm3, (%rdi)
; X64-NEXT:    retq
  %tmp = insertelement <8 x i16> zeroinitializer, i16 %a0, i32 0
  %tmp2 = insertelement <8 x i16> %tmp, i16 %a1, i32 1
  %tmp4 = insertelement <8 x i16> %tmp2, i16 %a2, i32 2
  %tmp6 = insertelement <8 x i16> %tmp4, i16 %a3, i32 3
  %tmp8 = insertelement <8 x i16> %tmp6, i16 %a4, i32 4
  %tmp10 = insertelement <8 x i16> %tmp8, i16 %a5, i32 5
  %tmp12 = insertelement <8 x i16> %tmp10, i16 %a6, i32 6
  %tmp14 = insertelement <8 x i16> %tmp12, i16 %a7, i32 7
  store <8 x i16> %tmp14, <8 x i16>* %b
  ret void
}