/* $NetBSD: all_sync_ops_linkable.c,v 1.4 2014/02/21 10:26:25 martin Exp $ */

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Martin Husemann <martin@NetBSD.org>.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * This is a simple link-time test to verify that all builtin atomic sync
 * operations are available. Depending on the exact cpu/arch code generator
 * options, some of these need support functions (which on NetBSD we
 * typically provide in src/common/lib/libc/atomic).
 *
 * The list of operations has been extracted from the sync-builtins.def file
 * in the gcc distribution (as of gcc 4.8.2).
 */

#include <machine/types.h>
#include <sys/inttypes.h>

volatile uint8_t u8 = 0;
volatile uint16_t u16 = 0;
volatile uint32_t u32 = 0;

#ifdef __HAVE_ATOMIC64_OPS
volatile uint64_t u64 = 0;
#endif
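
/*
 * The operands above are volatile so the operations below cannot be
 * optimized away; the 64-bit operand exists only where the port provides
 * 64-bit atomic ops.  Each group in main() exercises one __sync_* family:
 * the type-generic builtin plus its explicitly sized _1/_2/_4 (and, where
 * available, _8) variants.
 */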

int
main(int argc, char **argv)
{
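	/* __sync_synchronize() issues a full memory barrier. */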
	__sync_synchronize();
	__sync_add_and_fetch(&u8, 1);
	__sync_add_and_fetch_1(&u8, 1);
	__sync_add_and_fetch_2(&u16, 1);
	__sync_add_and_fetch_4(&u32, 1);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_add_and_fetch_8(&u64, 1);
#endif
	__sync_bool_compare_and_swap(&u8, 1, 2);
	__sync_bool_compare_and_swap_1(&u8, 1, 2);
	__sync_bool_compare_and_swap_2(&u16, 1, 2);
	__sync_bool_compare_and_swap_4(&u32, 1, 2);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_bool_compare_and_swap_8(&u64, 1, 2);
#endif
	__sync_fetch_and_add(&u8, 1);
	__sync_fetch_and_add_1(&u8, 1);
	__sync_fetch_and_add_2(&u16, 1);
	__sync_fetch_and_add_4(&u32, 1);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_add_8(&u64, 1);
#endif
	__sync_fetch_and_and(&u8, 0x80);
	__sync_fetch_and_and_1(&u8, 0x80);
	__sync_fetch_and_and_2(&u16, 0x80);
	__sync_fetch_and_and_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_and_8(&u64, 0x80);
#endif
#ifndef __clang__
	__sync_fetch_and_nand(&u8, 0x80);
	__sync_fetch_and_nand_1(&u8, 0x80);
	__sync_fetch_and_nand_2(&u16, 0x80);
	__sync_fetch_and_nand_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_nand_8(&u64, 0x80);
#endif
#endif
	__sync_fetch_and_or(&u8, 0x80);
	__sync_fetch_and_or_1(&u8, 0x80);
	__sync_fetch_and_or_2(&u16, 0x80);
	__sync_fetch_and_or_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_or_8(&u64, 0x80);
#endif
	__sync_fetch_and_sub(&u8, 0x80);
	__sync_fetch_and_sub_1(&u8, 0x80);
	__sync_fetch_and_sub_2(&u16, 0x80);
	__sync_fetch_and_sub_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_sub_8(&u64, 0x80);
#endif
	__sync_fetch_and_xor(&u8, 0x80);
	__sync_fetch_and_xor_1(&u8, 0x80);
	__sync_fetch_and_xor_2(&u16, 0x80);
	__sync_fetch_and_xor_4(&u32, 0x80);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_fetch_and_xor_8(&u64, 0x80);
#endif
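	/* __sync_lock_release (release barrier) and __sync_lock_test_and_set (acquire barrier). */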
	__sync_lock_release(&u8);
	__sync_lock_release_1(&u8);
	__sync_lock_release_2(&u16);
	__sync_lock_release_4(&u32);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_lock_release_8(&u64);
#endif
	__sync_lock_test_and_set(&u8, 5);
	__sync_lock_test_and_set_1(&u8, 5);
	__sync_lock_test_and_set_2(&u16, 5);
	__sync_lock_test_and_set_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_lock_test_and_set_8(&u64, 5);
#endif
#ifndef __clang__
	__sync_nand_and_fetch(&u8, 5);
	__sync_nand_and_fetch_1(&u8, 5);
	__sync_nand_and_fetch_2(&u16, 5);
	__sync_nand_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_nand_and_fetch_8(&u64, 5);
#endif
#endif
	__sync_or_and_fetch(&u8, 5);
	__sync_or_and_fetch_1(&u8, 5);
	__sync_or_and_fetch_2(&u16, 5);
	__sync_or_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_or_and_fetch_8(&u64, 5);
#endif
	__sync_sub_and_fetch(&u8, 5);
	__sync_sub_and_fetch_1(&u8, 5);
	__sync_sub_and_fetch_2(&u16, 5);
	__sync_sub_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_sub_and_fetch_8(&u64, 5);
#endif
	__sync_val_compare_and_swap(&u8, 5, 9);
	__sync_val_compare_and_swap_1(&u8, 5, 9);
	__sync_val_compare_and_swap_2(&u16, 5, 9);
	__sync_val_compare_and_swap_4(&u32, 5, 9);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_val_compare_and_swap_8(&u64, 5, 9);
#endif
	__sync_xor_and_fetch(&u8, 5);
	__sync_xor_and_fetch_1(&u8, 5);
	__sync_xor_and_fetch_2(&u16, 5);
	__sync_xor_and_fetch_4(&u32, 5);
#ifdef __HAVE_ATOMIC64_OPS
	__sync_xor_and_fetch_8(&u64, 5);
#endif

	return 0;
}