// Test frontend handling of __sync builtins.
// Modified from a gcc testcase.
// RUN: %clang_cc1 -triple x86_64-apple-darwin -emit-llvm %s -o - | FileCheck %s

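// One global of each integer width, plus __int128, so the builtins are
// exercised at the 1-, 2-, 4-, 8-, and 16-byte widths.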
signed char sc;
unsigned char uc;
signed short ss;
unsigned short us;
signed int si;
unsigned int ui;
signed long long sll;
unsigned long long ull;
__int128 s128;
unsigned __int128 u128;

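// Each builtin's result is cast to void; the atomicrmw must still be emitted.
// The CHECK lines verify the operation, operand width, seq_cst ordering, and
// natural alignment (including the 16-byte i128 cases for xor).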
void test_op_ignore (void) // CHECK-LABEL: define{{.*}} void @test_op_ignore
{
  (void) __sync_fetch_and_add (&sc, 1); // CHECK: atomicrmw add i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_add (&uc, 1); // CHECK: atomicrmw add i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_add (&ss, 1); // CHECK: atomicrmw add i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_add (&us, 1); // CHECK: atomicrmw add i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_add (&si, 1); // CHECK: atomicrmw add i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_add (&ui, 1); // CHECK: atomicrmw add i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_add (&sll, 1); // CHECK: atomicrmw add i64* {{.*}} seq_cst, align 8
  (void) __sync_fetch_and_add (&ull, 1); // CHECK: atomicrmw add i64* {{.*}} seq_cst, align 8

  (void) __sync_fetch_and_sub (&sc, 1); // CHECK: atomicrmw sub i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_sub (&uc, 1); // CHECK: atomicrmw sub i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_sub (&ss, 1); // CHECK: atomicrmw sub i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_sub (&us, 1); // CHECK: atomicrmw sub i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_sub (&si, 1); // CHECK: atomicrmw sub i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_sub (&ui, 1); // CHECK: atomicrmw sub i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_sub (&sll, 1); // CHECK: atomicrmw sub i64* {{.*}} seq_cst, align 8
  (void) __sync_fetch_and_sub (&ull, 1); // CHECK: atomicrmw sub i64* {{.*}} seq_cst, align 8

  (void) __sync_fetch_and_or (&sc, 1); // CHECK: atomicrmw or i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_or (&uc, 1); // CHECK: atomicrmw or i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_or (&ss, 1); // CHECK: atomicrmw or i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_or (&us, 1); // CHECK: atomicrmw or i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_or (&si, 1); // CHECK: atomicrmw or i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_or (&ui, 1); // CHECK: atomicrmw or i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_or (&sll, 1); // CHECK: atomicrmw or i64* {{.*}} seq_cst, align 8
  (void) __sync_fetch_and_or (&ull, 1); // CHECK: atomicrmw or i64* {{.*}} seq_cst, align 8

  (void) __sync_fetch_and_xor (&sc, 1); // CHECK: atomicrmw xor i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_xor (&uc, 1); // CHECK: atomicrmw xor i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_xor (&ss, 1); // CHECK: atomicrmw xor i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_xor (&us, 1); // CHECK: atomicrmw xor i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_xor (&si, 1); // CHECK: atomicrmw xor i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_xor (&ui, 1); // CHECK: atomicrmw xor i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_xor (&sll, 1); // CHECK: atomicrmw xor i64* {{.*}} seq_cst, align 8
  (void) __sync_fetch_and_xor (&ull, 1); // CHECK: atomicrmw xor i64* {{.*}} seq_cst, align 8
  (void) __sync_fetch_and_xor (&u128, 1); // CHECK: atomicrmw xor i128* {{.*}} seq_cst, align 16
  (void) __sync_fetch_and_xor (&s128, 1); // CHECK: atomicrmw xor i128* {{.*}} seq_cst, align 16

  (void) __sync_fetch_and_nand (&sc, 1); // CHECK: atomicrmw nand i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_nand (&uc, 1); // CHECK: atomicrmw nand i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_nand (&ss, 1); // CHECK: atomicrmw nand i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_nand (&us, 1); // CHECK: atomicrmw nand i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_nand (&si, 1); // CHECK: atomicrmw nand i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_nand (&ui, 1); // CHECK: atomicrmw nand i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_nand (&sll, 1); // CHECK: atomicrmw nand i64* {{.*}} seq_cst, align 8
  (void) __sync_fetch_and_nand (&ull, 1); // CHECK: atomicrmw nand i64* {{.*}} seq_cst, align 8

  (void) __sync_fetch_and_and (&sc, 1); // CHECK: atomicrmw and i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_and (&uc, 1); // CHECK: atomicrmw and i8* {{.*}} seq_cst, align 1
  (void) __sync_fetch_and_and (&ss, 1); // CHECK: atomicrmw and i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_and (&us, 1); // CHECK: atomicrmw and i16* {{.*}} seq_cst, align 2
  (void) __sync_fetch_and_and (&si, 1); // CHECK: atomicrmw and i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_and (&ui, 1); // CHECK: atomicrmw and i32* {{.*}} seq_cst, align 4
  (void) __sync_fetch_and_and (&sll, 1); // CHECK: atomicrmw and i64* {{.*}} seq_cst, align 8
  (void) __sync_fetch_and_and (&ull, 1); // CHECK: atomicrmw and i64* {{.*}} seq_cst, align 8

}

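// __sync_fetch_and_OP returns the value held *before* the operation, which is
// exactly what atomicrmw yields, so only the atomicrmw opcode is checked here.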
void test_fetch_and_op (void) // CHECK-LABEL: define{{.*}} void @test_fetch_and_op
{
  sc = __sync_fetch_and_add (&sc, 11); // CHECK: atomicrmw add
  uc = __sync_fetch_and_add (&uc, 11); // CHECK: atomicrmw add
  ss = __sync_fetch_and_add (&ss, 11); // CHECK: atomicrmw add
  us = __sync_fetch_and_add (&us, 11); // CHECK: atomicrmw add
  si = __sync_fetch_and_add (&si, 11); // CHECK: atomicrmw add
  ui = __sync_fetch_and_add (&ui, 11); // CHECK: atomicrmw add
  sll = __sync_fetch_and_add (&sll, 11); // CHECK: atomicrmw add
  ull = __sync_fetch_and_add (&ull, 11); // CHECK: atomicrmw add

  sc = __sync_fetch_and_sub (&sc, 11); // CHECK: atomicrmw sub
  uc = __sync_fetch_and_sub (&uc, 11); // CHECK: atomicrmw sub
  ss = __sync_fetch_and_sub (&ss, 11); // CHECK: atomicrmw sub
  us = __sync_fetch_and_sub (&us, 11); // CHECK: atomicrmw sub
  si = __sync_fetch_and_sub (&si, 11); // CHECK: atomicrmw sub
  ui = __sync_fetch_and_sub (&ui, 11); // CHECK: atomicrmw sub
  sll = __sync_fetch_and_sub (&sll, 11); // CHECK: atomicrmw sub
  ull = __sync_fetch_and_sub (&ull, 11); // CHECK: atomicrmw sub

  sc = __sync_fetch_and_or (&sc, 11); // CHECK: atomicrmw or
  uc = __sync_fetch_and_or (&uc, 11); // CHECK: atomicrmw or
  ss = __sync_fetch_and_or (&ss, 11); // CHECK: atomicrmw or
  us = __sync_fetch_and_or (&us, 11); // CHECK: atomicrmw or
  si = __sync_fetch_and_or (&si, 11); // CHECK: atomicrmw or
  ui = __sync_fetch_and_or (&ui, 11); // CHECK: atomicrmw or
  sll = __sync_fetch_and_or (&sll, 11); // CHECK: atomicrmw or
  ull = __sync_fetch_and_or (&ull, 11); // CHECK: atomicrmw or

  sc = __sync_fetch_and_xor (&sc, 11); // CHECK: atomicrmw xor
  uc = __sync_fetch_and_xor (&uc, 11); // CHECK: atomicrmw xor
  ss = __sync_fetch_and_xor (&ss, 11); // CHECK: atomicrmw xor
  us = __sync_fetch_and_xor (&us, 11); // CHECK: atomicrmw xor
  si = __sync_fetch_and_xor (&si, 11); // CHECK: atomicrmw xor
  ui = __sync_fetch_and_xor (&ui, 11); // CHECK: atomicrmw xor
  sll = __sync_fetch_and_xor (&sll, 11); // CHECK: atomicrmw xor
  ull = __sync_fetch_and_xor (&ull, 11); // CHECK: atomicrmw xor

  sc = __sync_fetch_and_nand (&sc, 11); // CHECK: atomicrmw nand
  uc = __sync_fetch_and_nand (&uc, 11); // CHECK: atomicrmw nand
  ss = __sync_fetch_and_nand (&ss, 11); // CHECK: atomicrmw nand
  us = __sync_fetch_and_nand (&us, 11); // CHECK: atomicrmw nand
  si = __sync_fetch_and_nand (&si, 11); // CHECK: atomicrmw nand
  ui = __sync_fetch_and_nand (&ui, 11); // CHECK: atomicrmw nand
  sll = __sync_fetch_and_nand (&sll, 11); // CHECK: atomicrmw nand
  ull = __sync_fetch_and_nand (&ull, 11); // CHECK: atomicrmw nand

  sc = __sync_fetch_and_and (&sc, 11); // CHECK: atomicrmw and
  uc = __sync_fetch_and_and (&uc, 11); // CHECK: atomicrmw and
  ss = __sync_fetch_and_and (&ss, 11); // CHECK: atomicrmw and
  us = __sync_fetch_and_and (&us, 11); // CHECK: atomicrmw and
  si = __sync_fetch_and_and (&si, 11); // CHECK: atomicrmw and
  ui = __sync_fetch_and_and (&ui, 11); // CHECK: atomicrmw and
  sll = __sync_fetch_and_and (&sll, 11); // CHECK: atomicrmw and
  ull = __sync_fetch_and_and (&ull, 11); // CHECK: atomicrmw and

}

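// __sync_OP_and_fetch returns the *new* value: Clang emits an atomicrmw (which
// yields the old value) and then reapplies the operation in ordinary IR, so
// only the atomicrmw opcode is checked for most operations.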
void test_op_and_fetch (void)
{
  sc = __sync_add_and_fetch (&sc, uc); // CHECK: atomicrmw add
  uc = __sync_add_and_fetch (&uc, uc); // CHECK: atomicrmw add
  ss = __sync_add_and_fetch (&ss, uc); // CHECK: atomicrmw add
  us = __sync_add_and_fetch (&us, uc); // CHECK: atomicrmw add
  si = __sync_add_and_fetch (&si, uc); // CHECK: atomicrmw add
  ui = __sync_add_and_fetch (&ui, uc); // CHECK: atomicrmw add
  sll = __sync_add_and_fetch (&sll, uc); // CHECK: atomicrmw add
  ull = __sync_add_and_fetch (&ull, uc); // CHECK: atomicrmw add

  sc = __sync_sub_and_fetch (&sc, uc); // CHECK: atomicrmw sub
  uc = __sync_sub_and_fetch (&uc, uc); // CHECK: atomicrmw sub
  ss = __sync_sub_and_fetch (&ss, uc); // CHECK: atomicrmw sub
  us = __sync_sub_and_fetch (&us, uc); // CHECK: atomicrmw sub
  si = __sync_sub_and_fetch (&si, uc); // CHECK: atomicrmw sub
  ui = __sync_sub_and_fetch (&ui, uc); // CHECK: atomicrmw sub
  sll = __sync_sub_and_fetch (&sll, uc); // CHECK: atomicrmw sub
  ull = __sync_sub_and_fetch (&ull, uc); // CHECK: atomicrmw sub

  sc = __sync_or_and_fetch (&sc, uc); // CHECK: atomicrmw or
  uc = __sync_or_and_fetch (&uc, uc); // CHECK: atomicrmw or
  ss = __sync_or_and_fetch (&ss, uc); // CHECK: atomicrmw or
  us = __sync_or_and_fetch (&us, uc); // CHECK: atomicrmw or
  si = __sync_or_and_fetch (&si, uc); // CHECK: atomicrmw or
  ui = __sync_or_and_fetch (&ui, uc); // CHECK: atomicrmw or
  sll = __sync_or_and_fetch (&sll, uc); // CHECK: atomicrmw or
  ull = __sync_or_and_fetch (&ull, uc); // CHECK: atomicrmw or

  sc = __sync_xor_and_fetch (&sc, uc); // CHECK: atomicrmw xor
  uc = __sync_xor_and_fetch (&uc, uc); // CHECK: atomicrmw xor
  ss = __sync_xor_and_fetch (&ss, uc); // CHECK: atomicrmw xor
  us = __sync_xor_and_fetch (&us, uc); // CHECK: atomicrmw xor
  si = __sync_xor_and_fetch (&si, uc); // CHECK: atomicrmw xor
  ui = __sync_xor_and_fetch (&ui, uc); // CHECK: atomicrmw xor
  sll = __sync_xor_and_fetch (&sll, uc); // CHECK: atomicrmw xor
  ull = __sync_xor_and_fetch (&ull, uc); // CHECK: atomicrmw xor

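  // __sync_nand_and_fetch returns ~(old & val); after the atomicrmw nand the
  // IR recomputes that new value with an 'and' followed by an 'xor' with -1,
  // which the extra CHECK lines below match.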
  sc = __sync_nand_and_fetch (&sc, uc); // CHECK: atomicrmw nand
                                        // CHECK: and
                                        // CHECK: xor
                                        // CHECK: -1
  uc = __sync_nand_and_fetch (&uc, uc); // CHECK: atomicrmw nand
                                        // CHECK: and
                                        // CHECK: xor
                                        // CHECK: -1
  ss = __sync_nand_and_fetch (&ss, uc); // CHECK: atomicrmw nand
                                        // CHECK: and
                                        // CHECK: xor
                                        // CHECK: -1
  us = __sync_nand_and_fetch (&us, uc); // CHECK: atomicrmw nand
                                        // CHECK: and
                                        // CHECK: xor
                                        // CHECK: -1
  si = __sync_nand_and_fetch (&si, uc); // CHECK: atomicrmw nand
                                        // CHECK: and
                                        // CHECK: xor
                                        // CHECK: -1
  ui = __sync_nand_and_fetch (&ui, uc); // CHECK: atomicrmw nand
                                        // CHECK: and
                                        // CHECK: xor
                                        // CHECK: -1
  sll = __sync_nand_and_fetch (&sll, uc); // CHECK: atomicrmw nand
                                          // CHECK: and
                                          // CHECK: xor
                                          // CHECK: -1
  ull = __sync_nand_and_fetch (&ull, uc); // CHECK: atomicrmw nand
                                          // CHECK: and
                                          // CHECK: xor
                                          // CHECK: -1
  u128 = __sync_nand_and_fetch (&u128, uc); // CHECK: atomicrmw nand
                                          // CHECK: and
                                          // CHECK: xor
                                          // CHECK: -1
  s128 = __sync_nand_and_fetch (&s128, uc); // CHECK: atomicrmw nand
                                          // CHECK: and
                                          // CHECK: xor
                                          // CHECK: -1

  sc = __sync_and_and_fetch (&sc, uc); // CHECK: atomicrmw and
  uc = __sync_and_and_fetch (&uc, uc); // CHECK: atomicrmw and
  ss = __sync_and_and_fetch (&ss, uc); // CHECK: atomicrmw and
  us = __sync_and_and_fetch (&us, uc); // CHECK: atomicrmw and
  si = __sync_and_and_fetch (&si, uc); // CHECK: atomicrmw and
  ui = __sync_and_and_fetch (&ui, uc); // CHECK: atomicrmw and
  sll = __sync_and_and_fetch (&sll, uc); // CHECK: atomicrmw and
  ull = __sync_and_and_fetch (&ull, uc); // CHECK: atomicrmw and

}

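// __sync_val_compare_and_swap lowers to a cmpxchg, whose result is a
// { value, i1 success } pair; the old value is recovered with extractvalue 0.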
void test_compare_and_swap (void)
{
  sc = __sync_val_compare_and_swap (&sc, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i8* {{.*}} seq_cst, align 1
  // CHECK: extractvalue { i8, i1 } [[PAIR]], 0

  uc = __sync_val_compare_and_swap (&uc, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i8* {{.*}} seq_cst, align 1
  // CHECK: extractvalue { i8, i1 } [[PAIR]], 0

  ss = __sync_val_compare_and_swap (&ss, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i16* {{.*}} seq_cst, align 2
  // CHECK: extractvalue { i16, i1 } [[PAIR]], 0

  us = __sync_val_compare_and_swap (&us, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i16* {{.*}} seq_cst, align 2
  // CHECK: extractvalue { i16, i1 } [[PAIR]], 0

  si = __sync_val_compare_and_swap (&si, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i32* {{.*}} seq_cst, align 4
  // CHECK: extractvalue { i32, i1 } [[PAIR]], 0

  ui = __sync_val_compare_and_swap (&ui, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i32* {{.*}} seq_cst, align 4
  // CHECK: extractvalue { i32, i1 } [[PAIR]], 0

  sll = __sync_val_compare_and_swap (&sll, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i64* {{.*}} seq_cst, align 8
  // CHECK: extractvalue { i64, i1 } [[PAIR]], 0

  ull = __sync_val_compare_and_swap (&ull, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i64* {{.*}} seq_cst, align 8
  // CHECK: extractvalue { i64, i1 } [[PAIR]], 0

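  // __sync_bool_compare_and_swap uses the same cmpxchg but extracts the i1
  // success flag (element 1) instead of the old value.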
  ui = __sync_bool_compare_and_swap (&sc, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i8* {{.*}} seq_cst, align 1
  // CHECK: extractvalue { i8, i1 } [[PAIR]], 1

  ui = __sync_bool_compare_and_swap (&uc, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i8* {{.*}} seq_cst, align 1
  // CHECK: extractvalue { i8, i1 } [[PAIR]], 1

  ui = __sync_bool_compare_and_swap (&ss, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i16* {{.*}} seq_cst, align 2
  // CHECK: extractvalue { i16, i1 } [[PAIR]], 1

  ui = __sync_bool_compare_and_swap (&us, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i16* {{.*}} seq_cst, align 2
  // CHECK: extractvalue { i16, i1 } [[PAIR]], 1

  ui = __sync_bool_compare_and_swap (&si, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i32* {{.*}} seq_cst, align 4
  // CHECK: extractvalue { i32, i1 } [[PAIR]], 1

  ui = __sync_bool_compare_and_swap (&ui, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i32* {{.*}} seq_cst, align 4
  // CHECK: extractvalue { i32, i1 } [[PAIR]], 1

  ui = __sync_bool_compare_and_swap (&sll, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i64* {{.*}} seq_cst, align 8
  // CHECK: extractvalue { i64, i1 } [[PAIR]], 1

  ui = __sync_bool_compare_and_swap (&ull, uc, sc);
  // CHECK: [[PAIR:%[a-z0-9._]+]] = cmpxchg i64* {{.*}} seq_cst, align 8
  // CHECK: extractvalue { i64, i1 } [[PAIR]], 1
}

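// __sync_lock_test_and_set lowers to an atomic exchange (atomicrmw xchg); the
// CHECK lines again verify the seq_cst ordering and natural alignment.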
void test_lock (void)
{
  sc = __sync_lock_test_and_set (&sc, 1); // CHECK: atomicrmw xchg i8* {{.*}} seq_cst, align 1
  uc = __sync_lock_test_and_set (&uc, 1); // CHECK: atomicrmw xchg i8* {{.*}} seq_cst, align 1
  ss = __sync_lock_test_and_set (&ss, 1); // CHECK: atomicrmw xchg i16* {{.*}} seq_cst, align 2
  us = __sync_lock_test_and_set (&us, 1); // CHECK: atomicrmw xchg i16* {{.*}} seq_cst, align 2
  si = __sync_lock_test_and_set (&si, 1); // CHECK: atomicrmw xchg i32* {{.*}} seq_cst, align 4
  ui = __sync_lock_test_and_set (&ui, 1); // CHECK: atomicrmw xchg i32* {{.*}} seq_cst, align 4
  sll = __sync_lock_test_and_set (&sll, 1); // CHECK: atomicrmw xchg i64* {{.*}} seq_cst, align 8
  ull = __sync_lock_test_and_set (&ull, 1); // CHECK: atomicrmw xchg i64* {{.*}} seq_cst, align 8

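  // __sync_synchronize is a full barrier (a seq_cst fence), and
  // __sync_lock_release stores 0 with release ordering.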
  __sync_synchronize (); // CHECK: fence seq_cst

  __sync_lock_release (&sc); // CHECK: store atomic {{.*}} release, align 1
  __sync_lock_release (&uc); // CHECK: store atomic {{.*}} release, align 1
  __sync_lock_release (&ss); // CHECK: store atomic {{.*}} release, align 2
  __sync_lock_release (&us); // CHECK: store atomic {{.*}} release, align 2
  __sync_lock_release (&si); // CHECK: store atomic {{.*}} release, align 4
  __sync_lock_release (&ui); // CHECK: store atomic {{.*}} release, align 4
  __sync_lock_release (&sll); // CHECK: store atomic {{.*}} release, align 8
  __sync_lock_release (&ull); // CHECK: store atomic {{.*}} release, align 8
}

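// The __atomic_fetch_min/max builtins map to atomicrmw min/max (umin/umax for
// unsigned operands) and honor the explicit memory order argument
// (__ATOMIC_RELAXED appears as 'monotonic' in the IR).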
void test_atomic(void) {
  ui = __atomic_fetch_min(&ui, 5, __ATOMIC_RELAXED); // CHECK: atomicrmw umin {{.*}} monotonic, align 4
  si = __atomic_fetch_min(&si, 5, __ATOMIC_SEQ_CST); // CHECK: atomicrmw min {{.*}} seq_cst, align 4
  ui = __atomic_fetch_max(&ui, 5, __ATOMIC_ACQUIRE); // CHECK: atomicrmw umax {{.*}} acquire, align 4
  si = __atomic_fetch_max(&si, 5, __ATOMIC_RELEASE); // CHECK: atomicrmw max {{.*}} release, align 4
}