; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64

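; Signed i64 div+rem. On i686 this lowers to the __divdi3 and __moddi3 libcalls; on x86-64 a single cqto/idivq computes both, with the quotient in %rax and the remainder in %rdx.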
define void @si64(i64 %x, i64 %y, i64* %p, i64* %q) nounwind {
; X32-LABEL: si64:
; X32:       # %bb.0:
; X32-NEXT:    pushl %ebp
; X32-NEXT:    pushl %ebx
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebp
; X32-NEXT:    pushl %ebp
; X32-NEXT:    pushl %ebx
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    calll __divdi3
; X32-NEXT:    addl $16, %esp
; X32-NEXT:    movl %eax, %esi
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    pushl %ebp
; X32-NEXT:    pushl %ebx
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    calll __moddi3
; X32-NEXT:    addl $16, %esp
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %edi, 4(%ecx)
; X32-NEXT:    movl %esi, (%ecx)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %edx, 4(%ecx)
; X32-NEXT:    movl %eax, (%ecx)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    popl %ebx
; X32-NEXT:    popl %ebp
; X32-NEXT:    retl
;
; X64-LABEL: si64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    cqto
; X64-NEXT:    idivq %rsi
; X64-NEXT:    movq %rax, (%r8)
; X64-NEXT:    movq %rdx, (%rcx)
; X64-NEXT:    retq
  %r = sdiv i64 %x, %y
  %t = srem i64 %x, %y
  store i64 %r, i64* %p
  store i64 %t, i64* %q
  ret void
}

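; Signed i32 div+rem. Both targets use cltd/idivl, so one division yields both the quotient (%eax) and the remainder (%edx).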
define void @si32(i32 %x, i32 %y, i32* %p, i32* %q) nounwind {
; X32-LABEL: si32:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    cltd
; X32-NEXT:    idivl {{[0-9]+}}(%esp)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl %eax, (%esi)
; X32-NEXT:    movl %edx, (%ecx)
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: si32:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    cltd
; X64-NEXT:    idivl %esi
; X64-NEXT:    movl %eax, (%r8)
; X64-NEXT:    movl %edx, (%rcx)
; X64-NEXT:    retq
  %r = sdiv i32 %x, %y
  %t = srem i32 %x, %y
  store i32 %r, i32* %p
  store i32 %t, i32* %q
  ret void
}

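; Signed i16 div+rem via cwtd/idivw; the quotient is returned in %ax and the remainder in %dx.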
define void @si16(i16 %x, i16 %y, i16* %p, i16* %q) nounwind {
; X32-LABEL: si16:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    cwtd
; X32-NEXT:    idivw {{[0-9]+}}(%esp)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movw %ax, (%esi)
; X32-NEXT:    movw %dx, (%ecx)
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: si16:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    cwtd
; X64-NEXT:    idivw %si
; X64-NEXT:    movw %ax, (%r8)
; X64-NEXT:    movw %dx, (%rcx)
; X64-NEXT:    retq
  %r = sdiv i16 %x, %y
  %t = srem i16 %x, %y
  store i16 %r, i16* %p
  store i16 %t, i16* %q
  ret void
}

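; Signed i8 div+rem. idivb leaves the quotient in %al and the remainder in %ah, which is extracted with movsbl.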
define void @si8(i8 %x, i8 %y, i8* %p, i8* %q) nounwind {
; X32-LABEL: si8:
; X32:       # %bb.0:
; X32-NEXT:    pushl %ebx
; X32-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    idivb {{[0-9]+}}(%esp)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movsbl %ah, %ebx
; X32-NEXT:    movb %al, (%edx)
; X32-NEXT:    movb %bl, (%ecx)
; X32-NEXT:    popl %ebx
; X32-NEXT:    retl
;
; X64-LABEL: si8:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    idivb %sil
; X64-NEXT:    movsbl %ah, %esi
; X64-NEXT:    movb %al, (%rdx)
; X64-NEXT:    movb %sil, (%rcx)
; X64-NEXT:    retq
  %r = sdiv i8 %x, %y
  %t = srem i8 %x, %y
  store i8 %r, i8* %p
  store i8 %t, i8* %q
  ret void
}

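; Unsigned i64 div+rem. On i686 this lowers to the __udivdi3 and __umoddi3 libcalls; on x86-64 %rdx is zeroed and a single divq computes both results.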
define void @ui64(i64 %x, i64 %y, i64* %p, i64* %q) nounwind {
; X32-LABEL: ui64:
; X32:       # %bb.0:
; X32-NEXT:    pushl %ebp
; X32-NEXT:    pushl %ebx
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebp
; X32-NEXT:    pushl %ebp
; X32-NEXT:    pushl %ebx
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    calll __udivdi3
; X32-NEXT:    addl $16, %esp
; X32-NEXT:    movl %eax, %esi
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    pushl %ebp
; X32-NEXT:    pushl %ebx
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    pushl {{[0-9]+}}(%esp)
; X32-NEXT:    calll __umoddi3
; X32-NEXT:    addl $16, %esp
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %edi, 4(%ecx)
; X32-NEXT:    movl %esi, (%ecx)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %edx, 4(%ecx)
; X32-NEXT:    movl %eax, (%ecx)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    popl %ebx
; X32-NEXT:    popl %ebp
; X32-NEXT:    retl
;
; X64-LABEL: ui64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorl %edx, %edx
; X64-NEXT:    divq %rsi
; X64-NEXT:    movq %rax, (%r8)
; X64-NEXT:    movq %rdx, (%rcx)
; X64-NEXT:    retq
  %r = udiv i64 %x, %y
  %t = urem i64 %x, %y
  store i64 %r, i64* %p
  store i64 %t, i64* %q
  ret void
}

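; Unsigned i32 div+rem. %edx is zeroed and one divl yields both the quotient (%eax) and the remainder (%edx).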
define void @ui32(i32 %x, i32 %y, i32* %p, i32* %q) nounwind {
; X32-LABEL: ui32:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    divl {{[0-9]+}}(%esp)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl %eax, (%esi)
; X32-NEXT:    movl %edx, (%ecx)
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: ui32:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    xorl %edx, %edx
; X64-NEXT:    divl %esi
; X64-NEXT:    movl %eax, (%r8)
; X64-NEXT:    movl %edx, (%rcx)
; X64-NEXT:    retq
  %r = udiv i32 %x, %y
  %t = urem i32 %x, %y
  store i32 %r, i32* %p
  store i32 %t, i32* %q
  ret void
}

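; Unsigned i16 div+rem via divw with %edx zeroed; the quotient is returned in %ax and the remainder in %dx.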
define void @ui16(i16 %x, i16 %y, i16* %p, i16* %q) nounwind {
; X32-LABEL: ui16:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    divw {{[0-9]+}}(%esp)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movw %ax, (%esi)
; X32-NEXT:    movw %dx, (%ecx)
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: ui16:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    xorl %edx, %edx
; X64-NEXT:    divw %si
; X64-NEXT:    movw %ax, (%r8)
; X64-NEXT:    movw %dx, (%rcx)
; X64-NEXT:    retq
  %r = udiv i16 %x, %y
  %t = urem i16 %x, %y
  store i16 %r, i16* %p
  store i16 %t, i16* %q
  ret void
}

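; Unsigned i8 div+rem. divb leaves the quotient in %al and the remainder in %ah, which is extracted with movzbl.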
define void @ui8(i8 %x, i8 %y, i8* %p, i8* %q) nounwind {
; X32-LABEL: ui8:
; X32:       # %bb.0:
; X32-NEXT:    pushl %ebx
; X32-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    divb {{[0-9]+}}(%esp)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    movzbl %ah, %ebx
; X32-NEXT:    movb %al, (%edx)
; X32-NEXT:    movb %bl, (%ecx)
; X32-NEXT:    popl %ebx
; X32-NEXT:    retl
;
; X64-LABEL: ui8:
; X64:       # %bb.0:
; X64-NEXT:    movzbl %dil, %eax
; X64-NEXT:    divb %sil
; X64-NEXT:    movzbl %ah, %esi
; X64-NEXT:    movb %al, (%rdx)
; X64-NEXT:    movb %sil, (%rcx)
; X64-NEXT:    retq
  %r = udiv i8 %x, %y
  %t = urem i8 %x, %y
  store i8 %r, i8* %p
  store i8 %t, i8* %q
  ret void
}