/* SPDX-License-Identifier: GPL-2.0+ */
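/*
 * Generic atomic operations for architectures without native atomic
 * instructions.  Atomicity is provided by disabling local interrupts
 * around each update, so these helpers are only safe on uniprocessor
 * (non-SMP) configurations and imply no memory barriers.
 */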

#ifndef _ASM_GENERIC_ATOMIC_H
#define _ASM_GENERIC_ATOMIC_H

typedef struct { volatile int counter; } atomic_t;
#if BITS_PER_LONG == 32
typedef struct { volatile long long counter; } atomic64_t;
#else /* BITS_PER_LONG != 32 */
typedef struct { volatile long counter; } atomic64_t;
#endif

#define ATOMIC_INIT(i)	{ (i) }
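/* Example (hypothetical name): static atomic_t nr_users = ATOMIC_INIT(0); */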

/*
 * atomic_read() and atomic_set() are plain, unserialized accesses to the
 * counter; they do not imply any memory barrier.
 */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	((v)->counter = (i))
#define atomic64_read(v)	atomic_read(v)
#define atomic64_set(v, i)	atomic_set(v, i)

static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

static inline void atomic_inc(atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	++v->counter;
	local_irq_restore(flags);
}

static inline void atomic_dec(atomic_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	--v->counter;
	local_irq_restore(flags);
}

/* Atomically decrement @v and return true if the result is zero. */
static inline int atomic_dec_and_test(volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter - 1;
	v->counter = val;
	local_irq_restore(flags);

	return val == 0;
}
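
/*
 * Typical use (hypothetical example): drop a reference and release the
 * object when the last reference goes away:
 *
 *	if (atomic_dec_and_test(&obj->refcount))
 *		release_obj(obj);
 */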

/* Atomically add @i to @v and return true if the result is negative. */
static inline int atomic_add_negative(int i, volatile atomic_t *v)
{
	unsigned long flags = 0;
	int val;

	local_irq_save(flags);
	val = v->counter + i;
	v->counter = val;
	local_irq_restore(flags);

	return val < 0;
}

/* Atomically clear the bits set in @mask from the word at @addr. */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	*addr &= ~mask;
	local_irq_restore(flags);
}

#if BITS_PER_LONG == 32
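/* On 32-bit systems the 64-bit counter is passed and updated as long long. */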

static inline void atomic64_add(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

#else /* BITS_PER_LONG != 32 */

static inline void atomic64_add(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

static inline void atomic64_sub(long i, volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}
#endif /* BITS_PER_LONG == 32 */

static inline void atomic64_inc(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter += 1;
	local_irq_restore(flags);
}

static inline void atomic64_dec(volatile atomic64_t *v)
{
	unsigned long flags = 0;

	local_irq_save(flags);
	v->counter -= 1;
	local_irq_restore(flags);
}
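
/*
 * Usage sketch for the 64-bit helpers (hypothetical names, illustration
 * only):
 *
 *	static atomic64_t bytes_rx = ATOMIC_INIT(0);
 *
 *	atomic64_add(len, &bytes_rx);
 *	if (atomic64_read(&bytes_rx) > RX_LIMIT)
 *		throttle_rx();
 */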

#endif /* _ASM_GENERIC_ATOMIC_H */