1; RUN: llc < %s -emulated-tls -mtriple=i386-linux-gnu | FileCheck -check-prefix=X86 %s
2; RUN: llc < %s -emulated-tls -mtriple=x86_64-linux-gnu | FileCheck -check-prefix=X64 %s
3; RUN: llc < %s -emulated-tls -mtriple=i386-linux-android | FileCheck -check-prefix=X86 %s
4; RUN: llc < %s -emulated-tls -mtriple=x86_64-linux-android | FileCheck -check-prefix=X64 %s
5
6; RUN: llc < %s -mtriple=i386-linux-gnu | FileCheck -check-prefix=NoEMU %s
7; RUN: llc < %s -mtriple=x86_64-linux-gnu | FileCheck -check-prefix=NoEMU %s
8; RUN: llc < %s -mtriple=i386-linux-android | FileCheck -check-prefix=X86 %s
9; RUN: llc < %s -mtriple=x86_64-linux-android | FileCheck -check-prefix=X64 %s
10
; Test cases copied from tls.ll. The emulated TLS model is not yet
; implemented for the *-pc-win32 and *-pc-windows targets, so those
; triples are not exercised here.
13
14; NoEMU-NOT: __emutls
15
; Use my_emutls_get_address like __emutls_get_address.
; A user-defined accessor with the same shape as the emulated-TLS runtime
; hook: it takes the address of a control variable and returns the address
; of the per-thread storage.
@my_emutls_v_xyz = external global i8*, align 4
declare i8* @my_emutls_get_address(i8*)
19
; Call the user-defined accessor directly and load through the returned
; pointer. The checked code mirrors the __emutls_get_address pattern below:
; the control variable's address is passed on the stack (32-bit) or in the
; first argument register via the GOT (64-bit).
define dso_local i32 @my_get_xyz() {
; X86-LABEL: my_get_xyz:
; X86:         movl $my_emutls_v_xyz, (%esp)
; X86-NEXT:    calll my_emutls_get_address
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
;
; X64-LABEL: my_get_xyz:
; X64:         movq my_emutls_v_xyz@GOTPCREL(%rip), %rdi
; X64-NEXT:    callq my_emutls_get_address
; X64-NEXT:    movl (%rax), %eax
; X64-NEXT:    popq %rcx
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
entry:
  %call = call i8* @my_emutls_get_address(i8* bitcast (i8** @my_emutls_v_xyz to i8*))
  %0 = bitcast i8* %call to i32*
  %1 = load i32, i32* %0, align 4
  ret i32 %1
}
42
; Thread-local test variables covering the linkage/visibility combinations
; exercised by f1-f14: dso_local, external, internal, hidden, and
; external-hidden i32 values, plus i16 and i8 sizes for the extension tests.
@i1 = dso_local thread_local global i32 15
@i2 = external thread_local global i32
@i3 = internal thread_local global i32 15
@i4 = hidden thread_local global i32 15
@i5 = external hidden thread_local global i32
@s1 = dso_local thread_local global i16 15
@b1 = dso_local thread_local global i8 0
50
; Load i32 from the dso_local thread_local @i1. Under emulated TLS this
; lowers to a call to __emutls_get_address with the address of the
; __emutls_v.i1 control variable, then a load through the returned pointer.
define dso_local i32 @f1() {
; X86-LABEL: f1:
; X86:         movl $__emutls_v.i1, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
;
; X64-LABEL: f1:
; X64:         movl $__emutls_v.i1, %edi
; X64-NEXT:    callq __emutls_get_address
; X64-NEXT:    movl (%rax), %eax
; X64-NEXT:    popq %rcx
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
entry:
  %tmp1 = load i32, i32* @i1
  ret i32 %tmp1
}
71
; Take the address of @i1: same __emutls_get_address call as f1, but the
; returned pointer is the result itself (no dereference is emitted).
define dso_local i32* @f2() {
; X86-LABEL: f2:
; X86:         movl $__emutls_v.i1, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
;
; X64-LABEL: f2:
; X64:         movl $__emutls_v.i1, %edi
; X64-NEXT:    callq __emutls_get_address
; X64-NEXT:    popq %rcx
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
entry:
  ret i32* @i1
}
89
; Load i32 from the externally-defined thread_local @i2. nounwind, so no
; CFI directives are expected in the checked epilogue. Only 32-bit output
; is checked for this case.
define dso_local i32 @f3() nounwind {
; X86-LABEL: f3:
; X86:         movl $__emutls_v.i2, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    retl
entry:
  %tmp1 = load i32, i32* @i2
  ret i32 %tmp1
}
101
; Take the address of the external thread_local @i2.
define dso_local i32* @f4() {
; X86-LABEL: f4:
; X86:         movl $__emutls_v.i2, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  ret i32* @i2
}
112
; Load i32 from the internal thread_local @i3; internal linkage still goes
; through the __emutls_v.i3 control variable.
define dso_local i32 @f5() nounwind {
; X86-LABEL: f5:
; X86:         movl $__emutls_v.i3, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    retl
entry:
  %tmp1 = load i32, i32* @i3
  ret i32 %tmp1
}
124
; Take the address of the internal thread_local @i3.
define dso_local i32* @f6() {
; X86-LABEL: f6:
; X86:         movl $__emutls_v.i3, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  ret i32* @i3
}
135
; Load i32 from the hidden thread_local @i4.
define dso_local i32 @f7() {
; X86-LABEL: f7:
; X86:         movl $__emutls_v.i4, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %tmp1 = load i32, i32* @i4
  ret i32 %tmp1
}
148
; Take the address of the hidden thread_local @i4.
define dso_local i32* @f8() {
; X86-LABEL: f8:
; X86:         movl $__emutls_v.i4, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  ret i32* @i4
}
159
; Load i32 from the external hidden thread_local @i5; no local definition
; of __emutls_v.i5 should be emitted, only a .hidden directive (checked in
; the data sections at the bottom of the file).
define dso_local i32 @f9() {
; X86-LABEL: f9:
; X86:         movl $__emutls_v.i5, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %tmp1 = load i32, i32* @i5
  ret i32 %tmp1
}
172
; Take the address of the external hidden thread_local @i5.
define dso_local i32* @f10() {
; X86-LABEL: f10:
; X86:         movl $__emutls_v.i5, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  ret i32* @i5
}
183
; Load i16 from the thread_local @s1; returned as i16, so the checked code
; uses a zero-extending 16-bit load (movzwl).
define dso_local i16 @f11() {
; X86-LABEL: f11:
; X86:         movl $__emutls_v.s1, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movzwl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %tmp1 = load i16, i16* @s1
  ret i16 %tmp1
}
196
; Load i16 from @s1 and sign-extend to i32; the extension folds into the
; load as movswl.
define dso_local i32 @f12() {
; X86-LABEL: f12:
; X86:         movl $__emutls_v.s1, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movswl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %tmp1 = load i16, i16* @s1
  %tmp2 = sext i16 %tmp1 to i32
  ret i32 %tmp2
}
210
; Load i8 from the thread_local @b1; returned as i8 via a plain byte load.
define dso_local i8 @f13() {
; X86-LABEL: f13:
; X86:         movl $__emutls_v.b1, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movb (%eax), %al
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %tmp1 = load i8, i8* @b1
  ret i8 %tmp1
}
223
; Load i8 from @b1 and sign-extend to i32; the extension folds into the
; load as movsbl.
define dso_local i32 @f14() {
; X86-LABEL: f14:
; X86:         movl $__emutls_v.b1, (%esp)
; X86-NEXT:    calll __emutls_get_address
; X86-NEXT:    movsbl (%eax), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
entry:
  %tmp1 = load i8, i8* @b1
  %tmp2 = sext i8 %tmp1 to i32
  ret i32 %tmp2
}
237
238;;;;;;;;;;;;;; 32-bit __emutls_v. and __emutls_t.
239
240; X86-LABEL: __emutls_v.i1:
241; X86-NEXT: .long 4
242; X86-NEXT: .long 4
243; X86-NEXT: .long 0
244; X86-NEXT: .long __emutls_t.i1
245
246; X86-LABEL: __emutls_t.i1:
247; X86-NEXT: .long 15
248
249; X86-NOT:   __emutls_v.i2
250
251; X86-LABEL: __emutls_v.i3:
252; X86-NEXT: .long 4
253; X86-NEXT: .long 4
254; X86-NEXT: .long 0
255; X86-NEXT: .long __emutls_t.i3
256
257; X86-LABEL: __emutls_t.i3:
258; X86-NEXT: .long 15
259
260; X86-LABEL: __emutls_v.i4:
261; X86-NEXT: .long 4
262; X86-NEXT: .long 4
263; X86-NEXT: .long 0
264; X86-NEXT: .long __emutls_t.i4
265
266; X86-LABEL: __emutls_t.i4:
267; X86-NEXT: .long 15
268
269; X86-NOT:   __emutls_v.i5:
270; X86:      .hidden __emutls_v.i5
271; X86-NOT:   __emutls_v.i5:
272
273; X86-LABEL: __emutls_v.s1:
274; X86-NEXT: .long 2
275; X86-NEXT: .long 2
276; X86-NEXT: .long 0
277; X86-NEXT: .long __emutls_t.s1
278
279; X86-LABEL: __emutls_t.s1:
280; X86-NEXT: .short 15
281
282; X86-LABEL: __emutls_v.b1:
283; X86-NEXT: .long 1
284; X86-NEXT: .long 1
285; X86-NEXT: .long 0
286; X86-NEXT: .long 0
287
288; X86-NOT:   __emutls_t.b1
289
290;;;;;;;;;;;;;; 64-bit __emutls_v. and __emutls_t.
291
292; X64-LABEL: __emutls_v.i1:
293; X64-NEXT: .quad 4
294; X64-NEXT: .quad 4
295; X64-NEXT: .quad 0
296; X64-NEXT: .quad __emutls_t.i1
297
298; X64-LABEL: __emutls_t.i1:
299; X64-NEXT: .long 15
300
301; X64-NOT:   __emutls_v.i2
302
303; X64-LABEL: __emutls_v.i3:
304; X64-NEXT: .quad 4
305; X64-NEXT: .quad 4
306; X64-NEXT: .quad 0
307; X64-NEXT: .quad __emutls_t.i3
308
309; X64-LABEL: __emutls_t.i3:
310; X64-NEXT: .long 15
311
312; X64-LABEL: __emutls_v.i4:
313; X64-NEXT: .quad 4
314; X64-NEXT: .quad 4
315; X64-NEXT: .quad 0
316; X64-NEXT: .quad __emutls_t.i4
317
318; X64-LABEL: __emutls_t.i4:
319; X64-NEXT: .long 15
320
321; X64-NOT:   __emutls_v.i5:
322; X64:      .hidden __emutls_v.i5
323; X64-NOT:   __emutls_v.i5:
324
325; X64-LABEL: __emutls_v.s1:
326; X64-NEXT: .quad 2
327; X64-NEXT: .quad 2
328; X64-NEXT: .quad 0
329; X64-NEXT: .quad __emutls_t.s1
330
331; X64-LABEL: __emutls_t.s1:
332; X64-NEXT: .short 15
333
334; X64-LABEL: __emutls_v.b1:
335; X64-NEXT: .quad 1
336; X64-NEXT: .quad 1
337; X64-NEXT: .quad 0
338; X64-NEXT: .quad 0
339
340; X64-NOT:  __emutls_t.b1
341