// RUN: %clang_cc1 -DCK1 -verify -fopenmp -fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple powerpc64le-unknown-unknown -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -DCK1 -fopenmp -fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -std=c++11 -triple powerpc64le-unknown-unknown -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple powerpc64le-unknown-unknown -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s

// RUN: %clang_cc1 -DCK1 -verify -fopenmp-simd -fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple powerpc64le-unknown-unknown -emit-llvm %s -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// RUN: %clang_cc1 -DCK1 -fopenmp-simd -fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -std=c++11 -triple powerpc64le-unknown-unknown -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp-simd -fopenmp-version=50 -fopenmp-targets=powerpc64le-ibm-linux-gnu -x c++ -triple powerpc64le-unknown-unknown -std=c++11 -include-pch %t -verify %s -emit-llvm -o - | FileCheck --check-prefix SIMD-ONLY0 %s
// SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
// expected-no-diagnostics

#ifndef HEADER
#define HEADER

// CHECK-DAG: [[SIZES1:@.+]] = private unnamed_addr constant [5 x i64] zeroinitializer
// 64 = 0x40 = OMP_MAP_RETURN_PARAM
// CHECK-DAG: [[MAPTYPES1:@.+]] = private unnamed_addr constant [5 x i64] [i64 64, i64 64, i64 64, i64 64, i64 64]
// 0 = OMP_MAP_NONE
// 281474976710720 = 0x1000000000040 = OMP_MAP_MEMBER_OF | OMP_MAP_RETURN_PARAM
// CHECK-DAG: [[MAPTYPES2:@.+]] = private unnamed_addr constant [5 x i64] [i64 0, i64 281474976710720, i64 281474976710720, i64 281474976710720, i64 281474976710720]
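// A minimal worked breakdown of the map-type literals above, assuming the usual
// flag layout (OMP_MAP_RETURN_PARAM in bit 6, and the MEMBER_OF field stored as
// the parent entry's position + 1 in bits 48-63); shown only for orientation:
//   OMP_MAP_RETURN_PARAM                     = 0x40            = 64
//   OMP_MAP_MEMBER_OF(parent at entry 0)     = 0x1 << 48       = 281474976710656
//   OMP_MAP_MEMBER_OF | OMP_MAP_RETURN_PARAM = 0x1000000000040 = 281474976710720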
struct S {
  int a = 0;
  int *ptr = &a;
  int &ref = a;
  int arr[4];
  S() {}
  void foo() {
#pragma omp target data use_device_addr(a, ptr [3:4], ref, ptr[0], arr[:a])
    ++a, ++*ptr, ++ref, ++arr[0];
  }
};

int main() {
  float a = 0;
  float *ptr = &a;
  float &ref = a;
  float arr[4];
  float vla[(int)a];
  S s;
  s.foo();
#pragma omp target data use_device_addr(a, ptr [3:4], ref, ptr[0], arr[:(int)a], vla[0])
  ++a, ++*ptr, ++ref, ++arr[0], ++vla[0];
  return a;
}

// CHECK-LABEL: @main()
// CHECK: [[A_ADDR:%.+]] = alloca float,
// CHECK: [[PTR_ADDR:%.+]] = alloca float*,
// CHECK: [[REF_ADDR:%.+]] = alloca float*,
// CHECK: [[ARR_ADDR:%.+]] = alloca [4 x float],
// CHECK: [[BPTRS:%.+]] = alloca [5 x i8*],
// CHECK: [[PTRS:%.+]] = alloca [5 x i8*],
// CHECK: [[VLA_ADDR:%.+]] = alloca float, i64 %{{.+}},
// CHECK: [[PTR:%.+]] = load float*, float** [[PTR_ADDR]],
// CHECK: [[REF:%.+]] = load float*, float** [[REF_ADDR]],
// CHECK: [[ARR:%.+]] = getelementptr inbounds [4 x float], [4 x float]* [[ARR_ADDR]], i64 0, i64 0
// CHECK: [[BPTR0:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 0
// CHECK: [[BPTR0_A_ADDR:%.+]] = bitcast i8** [[BPTR0]] to float**
// CHECK: store float* [[A_ADDR]], float** [[BPTR0_A_ADDR]],
// CHECK: [[PTR0:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 0
// CHECK: [[PTR0_A_ADDR:%.+]] = bitcast i8** [[PTR0]] to float**
// CHECK: store float* [[A_ADDR]], float** [[PTR0_A_ADDR]],
// CHECK: [[BPTR1:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 1
// CHECK: [[BPTR1_PTR_ADDR:%.+]] = bitcast i8** [[BPTR1]] to float**
// CHECK: store float* [[PTR]], float** [[BPTR1_PTR_ADDR]],
// CHECK: [[PTR1:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 1
// CHECK: [[PTR1_PTR_ADDR:%.+]] = bitcast i8** [[PTR1]] to float**
// CHECK: store float* [[PTR]], float** [[PTR1_PTR_ADDR]],
// CHECK: [[BPTR2:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 2
// CHECK: [[BPTR2_REF_ADDR:%.+]] = bitcast i8** [[BPTR2]] to float**
// CHECK: store float* [[REF]], float** [[BPTR2_REF_ADDR]],
// CHECK: [[PTR2:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 2
// CHECK: [[PTR2_REF_ADDR:%.+]] = bitcast i8** [[PTR2]] to float**
// CHECK: store float* [[REF]], float** [[PTR2_REF_ADDR]],
// CHECK: [[BPTR3:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 3
// CHECK: [[BPTR3_ARR_ADDR:%.+]] = bitcast i8** [[BPTR3]] to float**
// CHECK: store float* [[ARR]], float** [[BPTR3_ARR_ADDR]],
// CHECK: [[PTR3:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 3
// CHECK: [[PTR3_ARR_ADDR:%.+]] = bitcast i8** [[PTR3]] to float**
// CHECK: store float* [[ARR]], float** [[PTR3_ARR_ADDR]],
// CHECK: [[BPTR4:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 4
// CHECK: [[BPTR4_VLA_ADDR:%.+]] = bitcast i8** [[BPTR4]] to float**
// CHECK: store float* [[VLA_ADDR]], float** [[BPTR4_VLA_ADDR]],
// CHECK: [[PTR4:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 4
// CHECK: [[PTR4_VLA_ADDR:%.+]] = bitcast i8** [[PTR4]] to float**
// CHECK: store float* [[VLA_ADDR]], float** [[PTR4_VLA_ADDR]],
// CHECK: [[BPTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 0
// CHECK: [[PTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 0
// CHECK: call void @__tgt_target_data_begin_mapper(%struct.ident_t* @{{.+}}, i64 -1, i32 5, i8** [[BPTR]], i8** [[PTR]], i64* getelementptr inbounds ([5 x i64], [5 x i64]* [[SIZES1]], i32 0, i32 0), i64* getelementptr inbounds ([5 x i64], [5 x i64]* [[MAPTYPES1]], i32 0, i32 0), i8** null, i8** null)
// CHECK: [[A_REF:%.+]] = load float*, float** [[BPTR0_A_ADDR]],
// CHECK: [[REF_REF:%.+]] = load float*, float** [[BPTR2_REF_ADDR]],
// CHECK: store float* [[REF_REF]], float** [[TMP_REF_ADDR:%.+]],
// CHECK: [[ARR:%.+]] = load float*, float** [[BPTR3_ARR_ADDR]],
// CHECK: [[ARR_REF:%.+]] = bitcast float* [[ARR]] to [4 x float]*
// CHECK: [[VLA_REF:%.+]] = load float*, float** [[BPTR4_VLA_ADDR]],
// CHECK: [[A:%.+]] = load float, float* [[A_REF]],
// CHECK: [[INC:%.+]] = fadd float [[A]], 1.000000e+00
// CHECK: store float [[INC]], float* [[A_REF]],
// CHECK: [[PTR_ADDR:%.+]] = load float*, float** [[BPTR1_PTR_ADDR]],
// CHECK: [[VAL:%.+]] = load float, float* [[PTR_ADDR]],
// CHECK: [[INC:%.+]] = fadd float [[VAL]], 1.000000e+00
// CHECK: store float [[INC]], float* [[PTR_ADDR]],
// CHECK: [[REF_ADDR:%.+]] = load float*, float** [[TMP_REF_ADDR]],
// CHECK: [[REF:%.+]] = load float, float* [[REF_ADDR]],
// CHECK: [[INC:%.+]] = fadd float [[REF]], 1.000000e+00
// CHECK: store float [[INC]], float* [[REF_ADDR]],
// CHECK: [[ARR0_ADDR:%.+]] = getelementptr inbounds [4 x float], [4 x float]* [[ARR_REF]], i64 0, i64 0
// CHECK: [[ARR0:%.+]] = load float, float* [[ARR0_ADDR]],
// CHECK: [[INC:%.+]] = fadd float [[ARR0]], 1.000000e+00
// CHECK: store float [[INC]], float* [[ARR0_ADDR]],
// CHECK: [[VLA0_ADDR:%.+]] = getelementptr inbounds float, float* [[VLA_REF]], i64 0
// CHECK: [[VLA0:%.+]] = load float, float* [[VLA0_ADDR]],
// CHECK: [[INC:%.+]] = fadd float [[VLA0]], 1.000000e+00
// CHECK: store float [[INC]], float* [[VLA0_ADDR]],
// CHECK: [[BPTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 0
// CHECK: [[PTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 0
// CHECK: call void @__tgt_target_data_end_mapper(%struct.ident_t* @{{.+}}, i64 -1, i32 5, i8** [[BPTR]], i8** [[PTR]], i64* getelementptr inbounds ([5 x i64], [5 x i64]* [[SIZES1]], i32 0, i32 0), i64* getelementptr inbounds ([5 x i64], [5 x i64]* [[MAPTYPES1]], i32 0, i32 0), i8** null, i8** null)

// CHECK: foo
// %this.addr = alloca %struct.S*, align 8
// CHECK: [[BPTRS:%.+]] = alloca [5 x i8*],
// CHECK: [[PTRS:%.+]] = alloca [5 x i8*],
// CHECK: [[SIZES:%.+]] = alloca [5 x i64],
// %tmp = alloca i32*, align 8
// %tmp6 = alloca i32**, align 8
// %tmp7 = alloca i32*, align 8
// %tmp8 = alloca i32**, align 8
// %tmp9 = alloca [4 x i32]*, align 8
// store %struct.S* %this, %struct.S** %this.addr, align 8
// %this1 = load %struct.S*, %struct.S** %this.addr, align 8
// CHECK: [[A_ADDR:%.+]] = getelementptr inbounds %struct.S, %struct.S* [[THIS:%.+]], i32 0, i32 0
// %ptr = getelementptr inbounds %struct.S, %struct.S* %this1, i32 0, i32 1
// %ref = getelementptr inbounds %struct.S, %struct.S* %this1, i32 0, i32 2
// %0 = load i32*, i32** %ref, align 8
// CHECK: [[ARR_ADDR:%.+]] = getelementptr inbounds %struct.S, %struct.S* [[THIS]], i32 0, i32 3
// CHECK: [[A_ADDR2:%.+]] = getelementptr inbounds %struct.S, %struct.S* [[THIS]], i32 0, i32 0
// CHECK: [[PTR_ADDR:%.+]] = getelementptr inbounds %struct.S, %struct.S* [[THIS]], i32 0, i32 1
// CHECK: [[REF_REF:%.+]] = getelementptr inbounds %struct.S, %struct.S* [[THIS]], i32 0, i32 2
// CHECK: [[REF_PTR:%.+]] = load i32*, i32** [[REF_REF]],
// CHECK: [[ARR_ADDR2:%.+]] = getelementptr inbounds %struct.S, %struct.S* [[THIS]], i32 0, i32 3
// CHECK: [[ARR_END:%.+]] = getelementptr [4 x i32], [4 x i32]* [[ARR_ADDR]], i32 1
// CHECK: [[BEGIN:%.+]] = bitcast i32* [[A_ADDR]] to i8*
// CHECK: [[END:%.+]] = bitcast [4 x i32]* [[ARR_END]] to i8*
// CHECK: [[E:%.+]] = ptrtoint i8* [[END]] to i64
// CHECK: [[B:%.+]] = ptrtoint i8* [[BEGIN]] to i64
// CHECK: [[DIFF:%.+]] = sub i64 [[E]], [[B]]
// CHECK: [[SZ:%.+]] = sdiv exact i64 [[DIFF]], ptrtoint (i8* getelementptr (i8, i8* null, i32 1) to i64)
// CHECK: [[BPTR0:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 0
// CHECK: [[BPTR0_S:%.+]] = bitcast i8** [[BPTR0]] to %struct.S**
// CHECK: store %struct.S* [[THIS]], %struct.S** [[BPTR0_S]],
// CHECK: [[PTR0:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 0
// CHECK: [[PTR0_BEGIN:%.+]] = bitcast i8** [[PTR0]] to i32**
// CHECK: store i32* [[A_ADDR]], i32** [[PTR0_BEGIN]],
// CHECK: [[SIZE0:%.+]] = getelementptr inbounds [5 x i64], [5 x i64]* [[SIZES]], i32 0, i32 0
// CHECK: store i64 [[SZ]], i64* [[SIZE0]],
// CHECK: [[BPTR1:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 1
// CHECK: [[BPTR1_A_ADDR:%.+]] = bitcast i8** [[BPTR1]] to i32**
// CHECK: store i32* [[A_ADDR2]], i32** [[BPTR1_A_ADDR]],
// CHECK: [[PTR1:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 1
// CHECK: [[PTR1_A_ADDR:%.+]] = bitcast i8** [[PTR1]] to i32**
// CHECK: store i32* [[A_ADDR2]], i32** [[PTR1_A_ADDR]],
// CHECK: [[SIZE1:%.+]] = getelementptr inbounds [5 x i64], [5 x i64]* [[SIZES]], i32 0, i32 1
// CHECK: store i64 0, i64* [[SIZE1]],
// CHECK: [[BPTR2:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 2
// CHECK: [[BPTR2_PTR_ADDR:%.+]] = bitcast i8** [[BPTR2]] to i32***
// CHECK: store i32** [[PTR_ADDR]], i32*** [[BPTR2_PTR_ADDR]],
// CHECK: [[PTR2:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 2
// CHECK: [[PTR2_PTR_ADDR:%.+]] = bitcast i8** [[PTR2]] to i32***
// CHECK: store i32** [[PTR_ADDR]], i32*** [[PTR2_PTR_ADDR]],
// CHECK: [[SIZE2:%.+]] = getelementptr inbounds [5 x i64], [5 x i64]* [[SIZES]], i32 0, i32 2
// CHECK: store i64 0, i64* [[SIZE2]],
// CHECK: [[BPTR3:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 3
// CHECK: [[BPTR3_REF_PTR:%.+]] = bitcast i8** [[BPTR3]] to i32**
// CHECK: store i32* [[REF_PTR]], i32** [[BPTR3_REF_PTR]],
// CHECK: [[PTR3:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 3
// CHECK: [[PTR3_REF_PTR:%.+]] = bitcast i8** [[PTR3]] to i32**
// CHECK: store i32* [[REF_PTR]], i32** [[PTR3_REF_PTR]],
// CHECK: [[SIZE3:%.+]] = getelementptr inbounds [5 x i64], [5 x i64]* [[SIZES]], i32 0, i32 3
// CHECK: store i64 0, i64* [[SIZE3]],
// CHECK: [[BPTR4:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 4
// CHECK: [[BPTR4_ARR_ADDR:%.+]] = bitcast i8** [[BPTR4]] to [4 x i32]**
// CHECK: store [4 x i32]* [[ARR_ADDR2]], [4 x i32]** [[BPTR4_ARR_ADDR]],
// CHECK: [[PTR4:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 4
// CHECK: [[PTR4_ARR_ADDR:%.+]] = bitcast i8** [[PTR4]] to [4 x i32]**
// CHECK: store [4 x i32]* [[ARR_ADDR2]], [4 x i32]** [[PTR4_ARR_ADDR]],
// CHECK: [[SIZE4:%.+]] = getelementptr inbounds [5 x i64], [5 x i64]* [[SIZES]], i32 0, i32 4
// CHECK: store i64 0, i64* [[SIZE4]],
// CHECK: [[BPTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 0
// CHECK: [[PTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 0
// CHECK: [[SIZE:%.+]] = getelementptr inbounds [5 x i64], [5 x i64]* [[SIZES]], i32 0, i32 0
// CHECK: call void @__tgt_target_data_begin_mapper(%struct.ident_t* @{{.+}}, i64 -1, i32 5, i8** [[BPTR]], i8** [[PTR]], i64* [[SIZE]], i64* getelementptr inbounds ([5 x i64], [5 x i64]* [[MAPTYPES2]], i32 0, i32 0), i8** null, i8** null)
// CHECK: [[A_ADDR:%.+]] = load i32*, i32** [[BPTR1_A_ADDR]],
// CHECK: store i32* [[A_ADDR]], i32** [[A_REF:%.+]],
// CHECK: [[PTR_ADDR:%.+]] = load i32**, i32*** [[BPTR2_PTR_ADDR]],
// CHECK: store i32** [[PTR_ADDR]], i32*** [[PTR_REF:%.+]],
// CHECK: [[REF_PTR:%.+]] = load i32*, i32** [[BPTR3_REF_PTR]],
// CHECK: store i32* [[REF_PTR]], i32** [[REF_REF:%.+]],
// CHECK: [[PTR_ADDR:%.+]] = load i32**, i32*** [[BPTR2_PTR_ADDR]],
// CHECK: store i32** [[PTR_ADDR]], i32*** [[PTR_REF2:%.+]],
// CHECK: [[ARR_ADDR:%.+]] = load [4 x i32]*, [4 x i32]** [[BPTR4_ARR_ADDR]],
// CHECK: store [4 x i32]* [[ARR_ADDR]], [4 x i32]** [[ARR_REF:%.+]],
// CHECK: [[A_ADDR:%.+]] = load i32*, i32** [[A_REF]],
// CHECK: [[A:%.+]] = load i32, i32* [[A_ADDR]],
// CHECK: [[INC:%.+]] = add nsw i32 [[A]], 1
// CHECK: store i32 [[INC]], i32* [[A_ADDR]],
// CHECK: [[PTR_PTR:%.+]] = load i32**, i32*** [[PTR_REF2]],
// CHECK: [[PTR:%.+]] = load i32*, i32** [[PTR_PTR]],
// CHECK: [[VAL:%.+]] = load i32, i32* [[PTR]],
// CHECK: [[INC:%.+]] = add nsw i32 [[VAL]], 1
// CHECK: store i32 [[INC]], i32* [[PTR]],
// CHECK: [[REF_PTR:%.+]] = load i32*, i32** [[REF_REF]],
// CHECK: [[VAL:%.+]] = load i32, i32* [[REF_PTR]],
// CHECK: [[INC:%.+]] = add nsw i32 [[VAL]], 1
// CHECK: store i32 [[INC]], i32* [[REF_PTR]],
// CHECK: [[ARR_ADDR:%.+]] = load [4 x i32]*, [4 x i32]** [[ARR_REF]],
// CHECK: [[ARR0_ADDR:%.+]] = getelementptr inbounds [4 x i32], [4 x i32]* [[ARR_ADDR]], i64 0, i64 0
// CHECK: [[VAL:%.+]] = load i32, i32* [[ARR0_ADDR]],
// CHECK: [[INC:%.+]] = add nsw i32 [[VAL]], 1
// CHECK: store i32 [[INC]], i32* [[ARR0_ADDR]],
// CHECK: [[BPTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[BPTRS]], i32 0, i32 0
// CHECK: [[PTR:%.+]] = getelementptr inbounds [5 x i8*], [5 x i8*]* [[PTRS]], i32 0, i32 0
// CHECK: [[SIZE:%.+]] = getelementptr inbounds [5 x i64], [5 x i64]* [[SIZES]], i32 0, i32 0
// CHECK: call void @__tgt_target_data_end_mapper(%struct.ident_t* @{{.+}}, i64 -1, i32 5, i8** [[BPTR]], i8** [[PTR]], i64* [[SIZE]], i64* getelementptr inbounds ([5 x i64], [5 x i64]* [[MAPTYPES2]], i32 0, i32 0), i8** null, i8** null)

#endif