// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides atomic operations with explicit instrumentation (e.g.
 * KASAN, KCSAN), which should be used unless it is necessary to avoid
 * instrumentation. Where it is necessary to avoid instrumentation, the
 * raw_atomic*() operations should be used.
 */
#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>
18 
19 /**
20  * atomic_read() - atomic load with relaxed ordering
21  * @v: pointer to atomic_t
22  *
23  * Atomically loads the value of @v with relaxed ordering.
24  *
25  * Unsafe to use in noinstr code; use raw_atomic_read() there.
26  *
27  * Return: The value loaded from @v.
28  */
29 static __always_inline int
atomic_read(const atomic_t *v)
31 {
32 	instrument_atomic_read(v, sizeof(*v));
33 	return raw_atomic_read(v);
34 }
35 
36 /**
37  * atomic_read_acquire() - atomic load with acquire ordering
38  * @v: pointer to atomic_t
39  *
40  * Atomically loads the value of @v with acquire ordering.
41  *
42  * Unsafe to use in noinstr code; use raw_atomic_read_acquire() there.
43  *
44  * Return: The value loaded from @v.
45  */
46 static __always_inline int
atomic_read_acquire(const atomic_t *v)
48 {
49 	instrument_atomic_read(v, sizeof(*v));
50 	return raw_atomic_read_acquire(v);
51 }
52 
53 /**
54  * atomic_set() - atomic set with relaxed ordering
55  * @v: pointer to atomic_t
56  * @i: int value to assign
57  *
58  * Atomically sets @v to @i with relaxed ordering.
59  *
60  * Unsafe to use in noinstr code; use raw_atomic_set() there.
61  *
62  * Return: Nothing.
63  */
64 static __always_inline void
atomic_set(atomic_t *v, int i)
66 {
67 	instrument_atomic_write(v, sizeof(*v));
68 	raw_atomic_set(v, i);
69 }
70 
71 /**
72  * atomic_set_release() - atomic set with release ordering
73  * @v: pointer to atomic_t
74  * @i: int value to assign
75  *
76  * Atomically sets @v to @i with release ordering.
77  *
78  * Unsafe to use in noinstr code; use raw_atomic_set_release() there.
79  *
80  * Return: Nothing.
81  */
82 static __always_inline void
atomic_set_release(atomic_t *v, int i)
84 {
85 	kcsan_release();
86 	instrument_atomic_write(v, sizeof(*v));
87 	raw_atomic_set_release(v, i);
88 }
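
/*
 * Illustrative sketch (not part of the generated API): the usual
 * release/acquire pairing between atomic_set_release() and
 * atomic_read_acquire(). The 'ready' flag and 'payload' variable are
 * hypothetical.
 *
 *	// publisher
 *	payload = compute();
 *	atomic_set_release(&ready, 1);	// payload write ordered before flag
 *
 *	// consumer
 *	if (atomic_read_acquire(&ready))
 *		use(payload);		// guaranteed to observe the payload
 */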
89 
90 /**
91  * atomic_add() - atomic add with relaxed ordering
92  * @i: int value to add
93  * @v: pointer to atomic_t
94  *
95  * Atomically updates @v to (@v + @i) with relaxed ordering.
96  *
97  * Unsafe to use in noinstr code; use raw_atomic_add() there.
98  *
99  * Return: Nothing.
100  */
101 static __always_inline void
atomic_add(int i, atomic_t *v)
103 {
104 	instrument_atomic_read_write(v, sizeof(*v));
105 	raw_atomic_add(i, v);
106 }
107 
108 /**
109  * atomic_add_return() - atomic add with full ordering
110  * @i: int value to add
111  * @v: pointer to atomic_t
112  *
113  * Atomically updates @v to (@v + @i) with full ordering.
114  *
115  * Unsafe to use in noinstr code; use raw_atomic_add_return() there.
116  *
117  * Return: The updated value of @v.
118  */
119 static __always_inline int
atomic_add_return(int i, atomic_t *v)
121 {
122 	kcsan_mb();
123 	instrument_atomic_read_write(v, sizeof(*v));
124 	return raw_atomic_add_return(i, v);
125 }
126 
127 /**
128  * atomic_add_return_acquire() - atomic add with acquire ordering
129  * @i: int value to add
130  * @v: pointer to atomic_t
131  *
132  * Atomically updates @v to (@v + @i) with acquire ordering.
133  *
134  * Unsafe to use in noinstr code; use raw_atomic_add_return_acquire() there.
135  *
136  * Return: The updated value of @v.
137  */
138 static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
140 {
141 	instrument_atomic_read_write(v, sizeof(*v));
142 	return raw_atomic_add_return_acquire(i, v);
143 }
144 
145 /**
146  * atomic_add_return_release() - atomic add with release ordering
147  * @i: int value to add
148  * @v: pointer to atomic_t
149  *
150  * Atomically updates @v to (@v + @i) with release ordering.
151  *
152  * Unsafe to use in noinstr code; use raw_atomic_add_return_release() there.
153  *
154  * Return: The updated value of @v.
155  */
156 static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
158 {
159 	kcsan_release();
160 	instrument_atomic_read_write(v, sizeof(*v));
161 	return raw_atomic_add_return_release(i, v);
162 }
163 
164 /**
165  * atomic_add_return_relaxed() - atomic add with relaxed ordering
166  * @i: int value to add
167  * @v: pointer to atomic_t
168  *
169  * Atomically updates @v to (@v + @i) with relaxed ordering.
170  *
171  * Unsafe to use in noinstr code; use raw_atomic_add_return_relaxed() there.
172  *
173  * Return: The updated value of @v.
174  */
175 static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
177 {
178 	instrument_atomic_read_write(v, sizeof(*v));
179 	return raw_atomic_add_return_relaxed(i, v);
180 }
181 
182 /**
183  * atomic_fetch_add() - atomic add with full ordering
184  * @i: int value to add
185  * @v: pointer to atomic_t
186  *
187  * Atomically updates @v to (@v + @i) with full ordering.
188  *
189  * Unsafe to use in noinstr code; use raw_atomic_fetch_add() there.
190  *
191  * Return: The original value of @v.
192  */
193 static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
195 {
196 	kcsan_mb();
197 	instrument_atomic_read_write(v, sizeof(*v));
198 	return raw_atomic_fetch_add(i, v);
199 }
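
/*
 * Illustrative sketch: the difference between the _return and fetch_
 * variants is only in what they hand back. Hypothetical example with @v
 * starting at 10:
 *
 *	atomic_t v = ATOMIC_INIT(10);
 *
 *	int new = atomic_add_return(5, &v);	// new == 15, v == 15
 *	int old = atomic_fetch_add(5, &v);	// old == 15, v == 20
 */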
200 
201 /**
202  * atomic_fetch_add_acquire() - atomic add with acquire ordering
203  * @i: int value to add
204  * @v: pointer to atomic_t
205  *
206  * Atomically updates @v to (@v + @i) with acquire ordering.
207  *
208  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_acquire() there.
209  *
210  * Return: The original value of @v.
211  */
212 static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
214 {
215 	instrument_atomic_read_write(v, sizeof(*v));
216 	return raw_atomic_fetch_add_acquire(i, v);
217 }
218 
219 /**
220  * atomic_fetch_add_release() - atomic add with release ordering
221  * @i: int value to add
222  * @v: pointer to atomic_t
223  *
224  * Atomically updates @v to (@v + @i) with release ordering.
225  *
226  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_release() there.
227  *
228  * Return: The original value of @v.
229  */
230 static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
232 {
233 	kcsan_release();
234 	instrument_atomic_read_write(v, sizeof(*v));
235 	return raw_atomic_fetch_add_release(i, v);
236 }
237 
238 /**
239  * atomic_fetch_add_relaxed() - atomic add with relaxed ordering
240  * @i: int value to add
241  * @v: pointer to atomic_t
242  *
243  * Atomically updates @v to (@v + @i) with relaxed ordering.
244  *
245  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_relaxed() there.
246  *
247  * Return: The original value of @v.
248  */
249 static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
251 {
252 	instrument_atomic_read_write(v, sizeof(*v));
253 	return raw_atomic_fetch_add_relaxed(i, v);
254 }
255 
256 /**
257  * atomic_sub() - atomic subtract with relaxed ordering
258  * @i: int value to subtract
259  * @v: pointer to atomic_t
260  *
261  * Atomically updates @v to (@v - @i) with relaxed ordering.
262  *
263  * Unsafe to use in noinstr code; use raw_atomic_sub() there.
264  *
265  * Return: Nothing.
266  */
267 static __always_inline void
atomic_sub(int i, atomic_t *v)
269 {
270 	instrument_atomic_read_write(v, sizeof(*v));
271 	raw_atomic_sub(i, v);
272 }
273 
274 /**
275  * atomic_sub_return() - atomic subtract with full ordering
276  * @i: int value to subtract
277  * @v: pointer to atomic_t
278  *
279  * Atomically updates @v to (@v - @i) with full ordering.
280  *
281  * Unsafe to use in noinstr code; use raw_atomic_sub_return() there.
282  *
283  * Return: The updated value of @v.
284  */
285 static __always_inline int
atomic_sub_return(int i, atomic_t *v)
287 {
288 	kcsan_mb();
289 	instrument_atomic_read_write(v, sizeof(*v));
290 	return raw_atomic_sub_return(i, v);
291 }
292 
293 /**
294  * atomic_sub_return_acquire() - atomic subtract with acquire ordering
295  * @i: int value to subtract
296  * @v: pointer to atomic_t
297  *
298  * Atomically updates @v to (@v - @i) with acquire ordering.
299  *
300  * Unsafe to use in noinstr code; use raw_atomic_sub_return_acquire() there.
301  *
302  * Return: The updated value of @v.
303  */
304 static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
306 {
307 	instrument_atomic_read_write(v, sizeof(*v));
308 	return raw_atomic_sub_return_acquire(i, v);
309 }
310 
311 /**
312  * atomic_sub_return_release() - atomic subtract with release ordering
313  * @i: int value to subtract
314  * @v: pointer to atomic_t
315  *
316  * Atomically updates @v to (@v - @i) with release ordering.
317  *
318  * Unsafe to use in noinstr code; use raw_atomic_sub_return_release() there.
319  *
320  * Return: The updated value of @v.
321  */
322 static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
324 {
325 	kcsan_release();
326 	instrument_atomic_read_write(v, sizeof(*v));
327 	return raw_atomic_sub_return_release(i, v);
328 }
329 
330 /**
331  * atomic_sub_return_relaxed() - atomic subtract with relaxed ordering
332  * @i: int value to subtract
333  * @v: pointer to atomic_t
334  *
335  * Atomically updates @v to (@v - @i) with relaxed ordering.
336  *
337  * Unsafe to use in noinstr code; use raw_atomic_sub_return_relaxed() there.
338  *
339  * Return: The updated value of @v.
340  */
341 static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
343 {
344 	instrument_atomic_read_write(v, sizeof(*v));
345 	return raw_atomic_sub_return_relaxed(i, v);
346 }
347 
348 /**
349  * atomic_fetch_sub() - atomic subtract with full ordering
350  * @i: int value to subtract
351  * @v: pointer to atomic_t
352  *
353  * Atomically updates @v to (@v - @i) with full ordering.
354  *
355  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub() there.
356  *
357  * Return: The original value of @v.
358  */
359 static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
361 {
362 	kcsan_mb();
363 	instrument_atomic_read_write(v, sizeof(*v));
364 	return raw_atomic_fetch_sub(i, v);
365 }
366 
367 /**
368  * atomic_fetch_sub_acquire() - atomic subtract with acquire ordering
369  * @i: int value to subtract
370  * @v: pointer to atomic_t
371  *
372  * Atomically updates @v to (@v - @i) with acquire ordering.
373  *
374  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_acquire() there.
375  *
376  * Return: The original value of @v.
377  */
378 static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
380 {
381 	instrument_atomic_read_write(v, sizeof(*v));
382 	return raw_atomic_fetch_sub_acquire(i, v);
383 }
384 
385 /**
386  * atomic_fetch_sub_release() - atomic subtract with release ordering
387  * @i: int value to subtract
388  * @v: pointer to atomic_t
389  *
390  * Atomically updates @v to (@v - @i) with release ordering.
391  *
392  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_release() there.
393  *
394  * Return: The original value of @v.
395  */
396 static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
398 {
399 	kcsan_release();
400 	instrument_atomic_read_write(v, sizeof(*v));
401 	return raw_atomic_fetch_sub_release(i, v);
402 }
403 
404 /**
405  * atomic_fetch_sub_relaxed() - atomic subtract with relaxed ordering
406  * @i: int value to subtract
407  * @v: pointer to atomic_t
408  *
409  * Atomically updates @v to (@v - @i) with relaxed ordering.
410  *
411  * Unsafe to use in noinstr code; use raw_atomic_fetch_sub_relaxed() there.
412  *
413  * Return: The original value of @v.
414  */
415 static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
417 {
418 	instrument_atomic_read_write(v, sizeof(*v));
419 	return raw_atomic_fetch_sub_relaxed(i, v);
420 }
421 
422 /**
423  * atomic_inc() - atomic increment with relaxed ordering
424  * @v: pointer to atomic_t
425  *
426  * Atomically updates @v to (@v + 1) with relaxed ordering.
427  *
428  * Unsafe to use in noinstr code; use raw_atomic_inc() there.
429  *
430  * Return: Nothing.
431  */
432 static __always_inline void
atomic_inc(atomic_t *v)
434 {
435 	instrument_atomic_read_write(v, sizeof(*v));
436 	raw_atomic_inc(v);
437 }
438 
439 /**
440  * atomic_inc_return() - atomic increment with full ordering
441  * @v: pointer to atomic_t
442  *
443  * Atomically updates @v to (@v + 1) with full ordering.
444  *
445  * Unsafe to use in noinstr code; use raw_atomic_inc_return() there.
446  *
447  * Return: The updated value of @v.
448  */
449 static __always_inline int
atomic_inc_return(atomic_t *v)
451 {
452 	kcsan_mb();
453 	instrument_atomic_read_write(v, sizeof(*v));
454 	return raw_atomic_inc_return(v);
455 }
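
/*
 * Illustrative sketch: atomic_inc_return() is a common way to hand out
 * unique, monotonically increasing ids. 'next_id' and alloc_id() are
 * hypothetical.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	static int alloc_id(void)
 *	{
 *		return atomic_inc_return(&next_id);	// first caller gets 1
 *	}
 */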
456 
457 /**
458  * atomic_inc_return_acquire() - atomic increment with acquire ordering
459  * @v: pointer to atomic_t
460  *
461  * Atomically updates @v to (@v + 1) with acquire ordering.
462  *
463  * Unsafe to use in noinstr code; use raw_atomic_inc_return_acquire() there.
464  *
465  * Return: The updated value of @v.
466  */
467 static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
469 {
470 	instrument_atomic_read_write(v, sizeof(*v));
471 	return raw_atomic_inc_return_acquire(v);
472 }
473 
474 /**
475  * atomic_inc_return_release() - atomic increment with release ordering
476  * @v: pointer to atomic_t
477  *
478  * Atomically updates @v to (@v + 1) with release ordering.
479  *
480  * Unsafe to use in noinstr code; use raw_atomic_inc_return_release() there.
481  *
482  * Return: The updated value of @v.
483  */
484 static __always_inline int
atomic_inc_return_release(atomic_t *v)
486 {
487 	kcsan_release();
488 	instrument_atomic_read_write(v, sizeof(*v));
489 	return raw_atomic_inc_return_release(v);
490 }
491 
492 /**
493  * atomic_inc_return_relaxed() - atomic increment with relaxed ordering
494  * @v: pointer to atomic_t
495  *
496  * Atomically updates @v to (@v + 1) with relaxed ordering.
497  *
498  * Unsafe to use in noinstr code; use raw_atomic_inc_return_relaxed() there.
499  *
500  * Return: The updated value of @v.
501  */
502 static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
504 {
505 	instrument_atomic_read_write(v, sizeof(*v));
506 	return raw_atomic_inc_return_relaxed(v);
507 }
508 
509 /**
510  * atomic_fetch_inc() - atomic increment with full ordering
511  * @v: pointer to atomic_t
512  *
513  * Atomically updates @v to (@v + 1) with full ordering.
514  *
515  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc() there.
516  *
517  * Return: The original value of @v.
518  */
519 static __always_inline int
atomic_fetch_inc(atomic_t *v)
521 {
522 	kcsan_mb();
523 	instrument_atomic_read_write(v, sizeof(*v));
524 	return raw_atomic_fetch_inc(v);
525 }
526 
527 /**
528  * atomic_fetch_inc_acquire() - atomic increment with acquire ordering
529  * @v: pointer to atomic_t
530  *
531  * Atomically updates @v to (@v + 1) with acquire ordering.
532  *
533  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_acquire() there.
534  *
535  * Return: The original value of @v.
536  */
537 static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
539 {
540 	instrument_atomic_read_write(v, sizeof(*v));
541 	return raw_atomic_fetch_inc_acquire(v);
542 }
543 
544 /**
545  * atomic_fetch_inc_release() - atomic increment with release ordering
546  * @v: pointer to atomic_t
547  *
548  * Atomically updates @v to (@v + 1) with release ordering.
549  *
550  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_release() there.
551  *
552  * Return: The original value of @v.
553  */
554 static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
556 {
557 	kcsan_release();
558 	instrument_atomic_read_write(v, sizeof(*v));
559 	return raw_atomic_fetch_inc_release(v);
560 }
561 
562 /**
563  * atomic_fetch_inc_relaxed() - atomic increment with relaxed ordering
564  * @v: pointer to atomic_t
565  *
566  * Atomically updates @v to (@v + 1) with relaxed ordering.
567  *
568  * Unsafe to use in noinstr code; use raw_atomic_fetch_inc_relaxed() there.
569  *
570  * Return: The original value of @v.
571  */
572 static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
574 {
575 	instrument_atomic_read_write(v, sizeof(*v));
576 	return raw_atomic_fetch_inc_relaxed(v);
577 }
578 
579 /**
580  * atomic_dec() - atomic decrement with relaxed ordering
581  * @v: pointer to atomic_t
582  *
583  * Atomically updates @v to (@v - 1) with relaxed ordering.
584  *
585  * Unsafe to use in noinstr code; use raw_atomic_dec() there.
586  *
587  * Return: Nothing.
588  */
589 static __always_inline void
atomic_dec(atomic_t *v)
591 {
592 	instrument_atomic_read_write(v, sizeof(*v));
593 	raw_atomic_dec(v);
594 }
595 
596 /**
597  * atomic_dec_return() - atomic decrement with full ordering
598  * @v: pointer to atomic_t
599  *
600  * Atomically updates @v to (@v - 1) with full ordering.
601  *
602  * Unsafe to use in noinstr code; use raw_atomic_dec_return() there.
603  *
604  * Return: The updated value of @v.
605  */
606 static __always_inline int
atomic_dec_return(atomic_t *v)
608 {
609 	kcsan_mb();
610 	instrument_atomic_read_write(v, sizeof(*v));
611 	return raw_atomic_dec_return(v);
612 }
613 
614 /**
615  * atomic_dec_return_acquire() - atomic decrement with acquire ordering
616  * @v: pointer to atomic_t
617  *
618  * Atomically updates @v to (@v - 1) with acquire ordering.
619  *
620  * Unsafe to use in noinstr code; use raw_atomic_dec_return_acquire() there.
621  *
622  * Return: The updated value of @v.
623  */
624 static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
626 {
627 	instrument_atomic_read_write(v, sizeof(*v));
628 	return raw_atomic_dec_return_acquire(v);
629 }
630 
631 /**
632  * atomic_dec_return_release() - atomic decrement with release ordering
633  * @v: pointer to atomic_t
634  *
635  * Atomically updates @v to (@v - 1) with release ordering.
636  *
637  * Unsafe to use in noinstr code; use raw_atomic_dec_return_release() there.
638  *
639  * Return: The updated value of @v.
640  */
641 static __always_inline int
atomic_dec_return_release(atomic_t *v)
643 {
644 	kcsan_release();
645 	instrument_atomic_read_write(v, sizeof(*v));
646 	return raw_atomic_dec_return_release(v);
647 }
648 
649 /**
650  * atomic_dec_return_relaxed() - atomic decrement with relaxed ordering
651  * @v: pointer to atomic_t
652  *
653  * Atomically updates @v to (@v - 1) with relaxed ordering.
654  *
655  * Unsafe to use in noinstr code; use raw_atomic_dec_return_relaxed() there.
656  *
657  * Return: The updated value of @v.
658  */
659 static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
661 {
662 	instrument_atomic_read_write(v, sizeof(*v));
663 	return raw_atomic_dec_return_relaxed(v);
664 }
665 
666 /**
667  * atomic_fetch_dec() - atomic decrement with full ordering
668  * @v: pointer to atomic_t
669  *
670  * Atomically updates @v to (@v - 1) with full ordering.
671  *
672  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec() there.
673  *
674  * Return: The original value of @v.
675  */
676 static __always_inline int
atomic_fetch_dec(atomic_t *v)
678 {
679 	kcsan_mb();
680 	instrument_atomic_read_write(v, sizeof(*v));
681 	return raw_atomic_fetch_dec(v);
682 }
683 
684 /**
685  * atomic_fetch_dec_acquire() - atomic decrement with acquire ordering
686  * @v: pointer to atomic_t
687  *
688  * Atomically updates @v to (@v - 1) with acquire ordering.
689  *
690  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_acquire() there.
691  *
692  * Return: The original value of @v.
693  */
694 static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
696 {
697 	instrument_atomic_read_write(v, sizeof(*v));
698 	return raw_atomic_fetch_dec_acquire(v);
699 }
700 
701 /**
702  * atomic_fetch_dec_release() - atomic decrement with release ordering
703  * @v: pointer to atomic_t
704  *
705  * Atomically updates @v to (@v - 1) with release ordering.
706  *
707  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_release() there.
708  *
709  * Return: The original value of @v.
710  */
711 static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
713 {
714 	kcsan_release();
715 	instrument_atomic_read_write(v, sizeof(*v));
716 	return raw_atomic_fetch_dec_release(v);
717 }
718 
719 /**
720  * atomic_fetch_dec_relaxed() - atomic decrement with relaxed ordering
721  * @v: pointer to atomic_t
722  *
723  * Atomically updates @v to (@v - 1) with relaxed ordering.
724  *
725  * Unsafe to use in noinstr code; use raw_atomic_fetch_dec_relaxed() there.
726  *
727  * Return: The original value of @v.
728  */
729 static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
731 {
732 	instrument_atomic_read_write(v, sizeof(*v));
733 	return raw_atomic_fetch_dec_relaxed(v);
734 }
735 
736 /**
737  * atomic_and() - atomic bitwise AND with relaxed ordering
738  * @i: int value
739  * @v: pointer to atomic_t
740  *
741  * Atomically updates @v to (@v & @i) with relaxed ordering.
742  *
743  * Unsafe to use in noinstr code; use raw_atomic_and() there.
744  *
745  * Return: Nothing.
746  */
747 static __always_inline void
atomic_and(int i, atomic_t *v)
749 {
750 	instrument_atomic_read_write(v, sizeof(*v));
751 	raw_atomic_and(i, v);
752 }
753 
754 /**
755  * atomic_fetch_and() - atomic bitwise AND with full ordering
756  * @i: int value
757  * @v: pointer to atomic_t
758  *
759  * Atomically updates @v to (@v & @i) with full ordering.
760  *
761  * Unsafe to use in noinstr code; use raw_atomic_fetch_and() there.
762  *
763  * Return: The original value of @v.
764  */
765 static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
767 {
768 	kcsan_mb();
769 	instrument_atomic_read_write(v, sizeof(*v));
770 	return raw_atomic_fetch_and(i, v);
771 }
772 
773 /**
774  * atomic_fetch_and_acquire() - atomic bitwise AND with acquire ordering
775  * @i: int value
776  * @v: pointer to atomic_t
777  *
778  * Atomically updates @v to (@v & @i) with acquire ordering.
779  *
780  * Unsafe to use in noinstr code; use raw_atomic_fetch_and_acquire() there.
781  *
782  * Return: The original value of @v.
783  */
784 static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
786 {
787 	instrument_atomic_read_write(v, sizeof(*v));
788 	return raw_atomic_fetch_and_acquire(i, v);
789 }
790 
791 /**
792  * atomic_fetch_and_release() - atomic bitwise AND with release ordering
793  * @i: int value
794  * @v: pointer to atomic_t
795  *
796  * Atomically updates @v to (@v & @i) with release ordering.
797  *
798  * Unsafe to use in noinstr code; use raw_atomic_fetch_and_release() there.
799  *
800  * Return: The original value of @v.
801  */
802 static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
804 {
805 	kcsan_release();
806 	instrument_atomic_read_write(v, sizeof(*v));
807 	return raw_atomic_fetch_and_release(i, v);
808 }
809 
810 /**
811  * atomic_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
812  * @i: int value
813  * @v: pointer to atomic_t
814  *
815  * Atomically updates @v to (@v & @i) with relaxed ordering.
816  *
817  * Unsafe to use in noinstr code; use raw_atomic_fetch_and_relaxed() there.
818  *
819  * Return: The original value of @v.
820  */
821 static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
823 {
824 	instrument_atomic_read_write(v, sizeof(*v));
825 	return raw_atomic_fetch_and_relaxed(i, v);
826 }
827 
828 /**
829  * atomic_andnot() - atomic bitwise AND NOT with relaxed ordering
830  * @i: int value
831  * @v: pointer to atomic_t
832  *
833  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
834  *
835  * Unsafe to use in noinstr code; use raw_atomic_andnot() there.
836  *
837  * Return: Nothing.
838  */
839 static __always_inline void
atomic_andnot(int i, atomic_t *v)
841 {
842 	instrument_atomic_read_write(v, sizeof(*v));
843 	raw_atomic_andnot(i, v);
844 }
845 
846 /**
847  * atomic_fetch_andnot() - atomic bitwise AND NOT with full ordering
848  * @i: int value
849  * @v: pointer to atomic_t
850  *
851  * Atomically updates @v to (@v & ~@i) with full ordering.
852  *
853  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot() there.
854  *
855  * Return: The original value of @v.
856  */
857 static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
859 {
860 	kcsan_mb();
861 	instrument_atomic_read_write(v, sizeof(*v));
862 	return raw_atomic_fetch_andnot(i, v);
863 }
864 
865 /**
866  * atomic_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
867  * @i: int value
868  * @v: pointer to atomic_t
869  *
870  * Atomically updates @v to (@v & ~@i) with acquire ordering.
871  *
872  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_acquire() there.
873  *
874  * Return: The original value of @v.
875  */
876 static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
878 {
879 	instrument_atomic_read_write(v, sizeof(*v));
880 	return raw_atomic_fetch_andnot_acquire(i, v);
881 }
882 
883 /**
884  * atomic_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
885  * @i: int value
886  * @v: pointer to atomic_t
887  *
888  * Atomically updates @v to (@v & ~@i) with release ordering.
889  *
890  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_release() there.
891  *
892  * Return: The original value of @v.
893  */
894 static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
896 {
897 	kcsan_release();
898 	instrument_atomic_read_write(v, sizeof(*v));
899 	return raw_atomic_fetch_andnot_release(i, v);
900 }
901 
902 /**
903  * atomic_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
904  * @i: int value
905  * @v: pointer to atomic_t
906  *
907  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
908  *
909  * Unsafe to use in noinstr code; use raw_atomic_fetch_andnot_relaxed() there.
910  *
911  * Return: The original value of @v.
912  */
913 static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
915 {
916 	instrument_atomic_read_write(v, sizeof(*v));
917 	return raw_atomic_fetch_andnot_relaxed(i, v);
918 }
919 
920 /**
921  * atomic_or() - atomic bitwise OR with relaxed ordering
922  * @i: int value
923  * @v: pointer to atomic_t
924  *
925  * Atomically updates @v to (@v | @i) with relaxed ordering.
926  *
927  * Unsafe to use in noinstr code; use raw_atomic_or() there.
928  *
929  * Return: Nothing.
930  */
931 static __always_inline void
atomic_or(int i, atomic_t *v)
933 {
934 	instrument_atomic_read_write(v, sizeof(*v));
935 	raw_atomic_or(i, v);
936 }
937 
938 /**
939  * atomic_fetch_or() - atomic bitwise OR with full ordering
940  * @i: int value
941  * @v: pointer to atomic_t
942  *
943  * Atomically updates @v to (@v | @i) with full ordering.
944  *
945  * Unsafe to use in noinstr code; use raw_atomic_fetch_or() there.
946  *
947  * Return: The original value of @v.
948  */
949 static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
951 {
952 	kcsan_mb();
953 	instrument_atomic_read_write(v, sizeof(*v));
954 	return raw_atomic_fetch_or(i, v);
955 }
956 
957 /**
958  * atomic_fetch_or_acquire() - atomic bitwise OR with acquire ordering
959  * @i: int value
960  * @v: pointer to atomic_t
961  *
962  * Atomically updates @v to (@v | @i) with acquire ordering.
963  *
964  * Unsafe to use in noinstr code; use raw_atomic_fetch_or_acquire() there.
965  *
966  * Return: The original value of @v.
967  */
968 static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
970 {
971 	instrument_atomic_read_write(v, sizeof(*v));
972 	return raw_atomic_fetch_or_acquire(i, v);
973 }
974 
975 /**
976  * atomic_fetch_or_release() - atomic bitwise OR with release ordering
977  * @i: int value
978  * @v: pointer to atomic_t
979  *
980  * Atomically updates @v to (@v | @i) with release ordering.
981  *
982  * Unsafe to use in noinstr code; use raw_atomic_fetch_or_release() there.
983  *
984  * Return: The original value of @v.
985  */
986 static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
988 {
989 	kcsan_release();
990 	instrument_atomic_read_write(v, sizeof(*v));
991 	return raw_atomic_fetch_or_release(i, v);
992 }
993 
994 /**
995  * atomic_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
996  * @i: int value
997  * @v: pointer to atomic_t
998  *
999  * Atomically updates @v to (@v | @i) with relaxed ordering.
1000  *
1001  * Unsafe to use in noinstr code; use raw_atomic_fetch_or_relaxed() there.
1002  *
1003  * Return: The original value of @v.
1004  */
1005 static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
1007 {
1008 	instrument_atomic_read_write(v, sizeof(*v));
1009 	return raw_atomic_fetch_or_relaxed(i, v);
1010 }
1011 
1012 /**
1013  * atomic_xor() - atomic bitwise XOR with relaxed ordering
1014  * @i: int value
1015  * @v: pointer to atomic_t
1016  *
1017  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1018  *
1019  * Unsafe to use in noinstr code; use raw_atomic_xor() there.
1020  *
1021  * Return: Nothing.
1022  */
1023 static __always_inline void
atomic_xor(int i, atomic_t *v)
1025 {
1026 	instrument_atomic_read_write(v, sizeof(*v));
1027 	raw_atomic_xor(i, v);
1028 }
1029 
1030 /**
1031  * atomic_fetch_xor() - atomic bitwise XOR with full ordering
1032  * @i: int value
1033  * @v: pointer to atomic_t
1034  *
1035  * Atomically updates @v to (@v ^ @i) with full ordering.
1036  *
1037  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor() there.
1038  *
1039  * Return: The original value of @v.
1040  */
1041 static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
1043 {
1044 	kcsan_mb();
1045 	instrument_atomic_read_write(v, sizeof(*v));
1046 	return raw_atomic_fetch_xor(i, v);
1047 }
1048 
1049 /**
1050  * atomic_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
1051  * @i: int value
1052  * @v: pointer to atomic_t
1053  *
1054  * Atomically updates @v to (@v ^ @i) with acquire ordering.
1055  *
1056  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_acquire() there.
1057  *
1058  * Return: The original value of @v.
1059  */
1060 static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
1062 {
1063 	instrument_atomic_read_write(v, sizeof(*v));
1064 	return raw_atomic_fetch_xor_acquire(i, v);
1065 }
1066 
1067 /**
1068  * atomic_fetch_xor_release() - atomic bitwise XOR with release ordering
1069  * @i: int value
1070  * @v: pointer to atomic_t
1071  *
1072  * Atomically updates @v to (@v ^ @i) with release ordering.
1073  *
1074  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_release() there.
1075  *
1076  * Return: The original value of @v.
1077  */
1078 static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
1080 {
1081 	kcsan_release();
1082 	instrument_atomic_read_write(v, sizeof(*v));
1083 	return raw_atomic_fetch_xor_release(i, v);
1084 }
1085 
1086 /**
1087  * atomic_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
1088  * @i: int value
1089  * @v: pointer to atomic_t
1090  *
1091  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1092  *
1093  * Unsafe to use in noinstr code; use raw_atomic_fetch_xor_relaxed() there.
1094  *
1095  * Return: The original value of @v.
1096  */
1097 static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
1099 {
1100 	instrument_atomic_read_write(v, sizeof(*v));
1101 	return raw_atomic_fetch_xor_relaxed(i, v);
1102 }
1103 
1104 /**
1105  * atomic_xchg() - atomic exchange with full ordering
1106  * @v: pointer to atomic_t
1107  * @new: int value to assign
1108  *
1109  * Atomically updates @v to @new with full ordering.
1110  *
1111  * Unsafe to use in noinstr code; use raw_atomic_xchg() there.
1112  *
1113  * Return: The original value of @v.
1114  */
1115 static __always_inline int
atomic_xchg(atomic_t *v, int new)
1117 {
1118 	kcsan_mb();
1119 	instrument_atomic_read_write(v, sizeof(*v));
1120 	return raw_atomic_xchg(v, new);
1121 }
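
/*
 * Illustrative sketch: because atomic_xchg() returns the value it
 * replaced, it can implement "claim the pending work exactly once". The
 * 'pending' flag and process_work() are hypothetical.
 *
 *	if (atomic_xchg(&pending, 0))
 *		process_work();		// only one caller sees the old 1
 */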
1122 
1123 /**
1124  * atomic_xchg_acquire() - atomic exchange with acquire ordering
1125  * @v: pointer to atomic_t
1126  * @new: int value to assign
1127  *
1128  * Atomically updates @v to @new with acquire ordering.
1129  *
1130  * Unsafe to use in noinstr code; use raw_atomic_xchg_acquire() there.
1131  *
1132  * Return: The original value of @v.
1133  */
1134 static __always_inline int
atomic_xchg_acquire(atomic_t *v, int new)
1136 {
1137 	instrument_atomic_read_write(v, sizeof(*v));
1138 	return raw_atomic_xchg_acquire(v, new);
1139 }
1140 
1141 /**
1142  * atomic_xchg_release() - atomic exchange with release ordering
1143  * @v: pointer to atomic_t
1144  * @new: int value to assign
1145  *
1146  * Atomically updates @v to @new with release ordering.
1147  *
1148  * Unsafe to use in noinstr code; use raw_atomic_xchg_release() there.
1149  *
1150  * Return: The original value of @v.
1151  */
1152 static __always_inline int
atomic_xchg_release(atomic_t *v, int new)
1154 {
1155 	kcsan_release();
1156 	instrument_atomic_read_write(v, sizeof(*v));
1157 	return raw_atomic_xchg_release(v, new);
1158 }
1159 
1160 /**
1161  * atomic_xchg_relaxed() - atomic exchange with relaxed ordering
1162  * @v: pointer to atomic_t
1163  * @new: int value to assign
1164  *
1165  * Atomically updates @v to @new with relaxed ordering.
1166  *
1167  * Unsafe to use in noinstr code; use raw_atomic_xchg_relaxed() there.
1168  *
1169  * Return: The original value of @v.
1170  */
1171 static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int new)
1173 {
1174 	instrument_atomic_read_write(v, sizeof(*v));
1175 	return raw_atomic_xchg_relaxed(v, new);
1176 }
1177 
1178 /**
1179  * atomic_cmpxchg() - atomic compare and exchange with full ordering
1180  * @v: pointer to atomic_t
1181  * @old: int value to compare with
1182  * @new: int value to assign
1183  *
1184  * If (@v == @old), atomically updates @v to @new with full ordering.
1185  * Otherwise, @v is not modified and relaxed ordering is provided.
1186  *
1187  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg() there.
1188  *
1189  * Return: The original value of @v.
1190  */
1191 static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
1193 {
1194 	kcsan_mb();
1195 	instrument_atomic_read_write(v, sizeof(*v));
1196 	return raw_atomic_cmpxchg(v, old, new);
1197 }
1198 
1199 /**
1200  * atomic_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1201  * @v: pointer to atomic_t
1202  * @old: int value to compare with
1203  * @new: int value to assign
1204  *
1205  * If (@v == @old), atomically updates @v to @new with acquire ordering.
1206  * Otherwise, @v is not modified and relaxed ordering is provided.
1207  *
1208  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_acquire() there.
1209  *
1210  * Return: The original value of @v.
1211  */
1212 static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
1214 {
1215 	instrument_atomic_read_write(v, sizeof(*v));
1216 	return raw_atomic_cmpxchg_acquire(v, old, new);
1217 }
1218 
1219 /**
1220  * atomic_cmpxchg_release() - atomic compare and exchange with release ordering
1221  * @v: pointer to atomic_t
1222  * @old: int value to compare with
1223  * @new: int value to assign
1224  *
1225  * If (@v == @old), atomically updates @v to @new with release ordering.
1226  * Otherwise, @v is not modified and relaxed ordering is provided.
1227  *
1228  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_release() there.
1229  *
1230  * Return: The original value of @v.
1231  */
1232 static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
1234 {
1235 	kcsan_release();
1236 	instrument_atomic_read_write(v, sizeof(*v));
1237 	return raw_atomic_cmpxchg_release(v, old, new);
1238 }
1239 
1240 /**
1241  * atomic_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1242  * @v: pointer to atomic_t
1243  * @old: int value to compare with
1244  * @new: int value to assign
1245  *
1246  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1247  * Otherwise, @v is not modified and relaxed ordering is provided.
1248  *
1249  * Unsafe to use in noinstr code; use raw_atomic_cmpxchg_relaxed() there.
1250  *
1251  * Return: The original value of @v.
1252  */
1253 static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
1255 {
1256 	instrument_atomic_read_write(v, sizeof(*v));
1257 	return raw_atomic_cmpxchg_relaxed(v, old, new);
1258 }
1259 
1260 /**
1261  * atomic_try_cmpxchg() - atomic compare and exchange with full ordering
1262  * @v: pointer to atomic_t
1263  * @old: pointer to int value to compare with
1264  * @new: int value to assign
1265  *
1266  * If (@v == @old), atomically updates @v to @new with full ordering.
1267  * Otherwise, @v is not modified, @old is updated to the current value of @v,
1268  * and relaxed ordering is provided.
1269  *
1270  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg() there.
1271  *
 * Return: @true if the exchange occurred, @false otherwise.
1273  */
1274 static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1276 {
1277 	kcsan_mb();
1278 	instrument_atomic_read_write(v, sizeof(*v));
1279 	instrument_atomic_read_write(old, sizeof(*old));
1280 	return raw_atomic_try_cmpxchg(v, old, new);
1281 }
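
/*
 * Illustrative sketch: a typical atomic_try_cmpxchg() update loop. On
 * failure @old is refreshed with the current value of @v, so the loop
 * does not need to re-read it. inc_below(), 'counter' and 'max' are
 * hypothetical.
 *
 *	static bool inc_below(atomic_t *counter, int max)
 *	{
 *		int old = atomic_read(counter);
 *
 *		do {
 *			if (old >= max)
 *				return false;
 *		} while (!atomic_try_cmpxchg(counter, &old, old + 1));
 *
 *		return true;
 *	}
 */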
1282 
1283 /**
1284  * atomic_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1285  * @v: pointer to atomic_t
1286  * @old: pointer to int value to compare with
1287  * @new: int value to assign
1288  *
1289  * If (@v == @old), atomically updates @v to @new with acquire ordering.
1290  * Otherwise, @v is not modified, @old is updated to the current value of @v,
1291  * and relaxed ordering is provided.
1292  *
1293  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_acquire() there.
1294  *
 * Return: @true if the exchange occurred, @false otherwise.
1296  */
1297 static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1299 {
1300 	instrument_atomic_read_write(v, sizeof(*v));
1301 	instrument_atomic_read_write(old, sizeof(*old));
1302 	return raw_atomic_try_cmpxchg_acquire(v, old, new);
1303 }
1304 
1305 /**
1306  * atomic_try_cmpxchg_release() - atomic compare and exchange with release ordering
1307  * @v: pointer to atomic_t
1308  * @old: pointer to int value to compare with
1309  * @new: int value to assign
1310  *
1311  * If (@v == @old), atomically updates @v to @new with release ordering.
1312  * Otherwise, @v is not modified, @old is updated to the current value of @v,
1313  * and relaxed ordering is provided.
1314  *
1315  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_release() there.
1316  *
 * Return: @true if the exchange occurred, @false otherwise.
1318  */
1319 static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1321 {
1322 	kcsan_release();
1323 	instrument_atomic_read_write(v, sizeof(*v));
1324 	instrument_atomic_read_write(old, sizeof(*old));
1325 	return raw_atomic_try_cmpxchg_release(v, old, new);
1326 }
1327 
1328 /**
1329  * atomic_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1330  * @v: pointer to atomic_t
1331  * @old: pointer to int value to compare with
1332  * @new: int value to assign
1333  *
1334  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1335  * Otherwise, @v is not modified, @old is updated to the current value of @v,
1336  * and relaxed ordering is provided.
1337  *
1338  * Unsafe to use in noinstr code; use raw_atomic_try_cmpxchg_relaxed() there.
1339  *
 * Return: @true if the exchange occurred, @false otherwise.
1341  */
1342 static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1344 {
1345 	instrument_atomic_read_write(v, sizeof(*v));
1346 	instrument_atomic_read_write(old, sizeof(*old));
1347 	return raw_atomic_try_cmpxchg_relaxed(v, old, new);
1348 }
1349 
1350 /**
1351  * atomic_sub_and_test() - atomic subtract and test if zero with full ordering
1352  * @i: int value to add
1353  * @v: pointer to atomic_t
1354  *
1355  * Atomically updates @v to (@v - @i) with full ordering.
1356  *
1357  * Unsafe to use in noinstr code; use raw_atomic_sub_and_test() there.
1358  *
1359  * Return: @true if the resulting value of @v is zero, @false otherwise.
1360  */
1361 static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
1363 {
1364 	kcsan_mb();
1365 	instrument_atomic_read_write(v, sizeof(*v));
1366 	return raw_atomic_sub_and_test(i, v);
1367 }
1368 
1369 /**
1370  * atomic_dec_and_test() - atomic decrement and test if zero with full ordering
1371  * @v: pointer to atomic_t
1372  *
1373  * Atomically updates @v to (@v - 1) with full ordering.
1374  *
1375  * Unsafe to use in noinstr code; use raw_atomic_dec_and_test() there.
1376  *
1377  * Return: @true if the resulting value of @v is zero, @false otherwise.
1378  */
1379 static __always_inline bool
atomic_dec_and_test(atomic_t *v)
1381 {
1382 	kcsan_mb();
1383 	instrument_atomic_read_write(v, sizeof(*v));
1384 	return raw_atomic_dec_and_test(v);
1385 }
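
/*
 * Illustrative sketch: a reference-count style put where the last caller
 * frees the object. 'obj', its 'refs' field and release_obj() are
 * hypothetical (new code would normally prefer refcount_t).
 *
 *	static void put_obj(struct obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refs))
 *			release_obj(obj);
 *	}
 */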
1386 
1387 /**
1388  * atomic_inc_and_test() - atomic increment and test if zero with full ordering
1389  * @v: pointer to atomic_t
1390  *
1391  * Atomically updates @v to (@v + 1) with full ordering.
1392  *
1393  * Unsafe to use in noinstr code; use raw_atomic_inc_and_test() there.
1394  *
1395  * Return: @true if the resulting value of @v is zero, @false otherwise.
1396  */
1397 static __always_inline bool
atomic_inc_and_test(atomic_t *v)
1399 {
1400 	kcsan_mb();
1401 	instrument_atomic_read_write(v, sizeof(*v));
1402 	return raw_atomic_inc_and_test(v);
1403 }
1404 
1405 /**
1406  * atomic_add_negative() - atomic add and test if negative with full ordering
1407  * @i: int value to add
1408  * @v: pointer to atomic_t
1409  *
1410  * Atomically updates @v to (@v + @i) with full ordering.
1411  *
1412  * Unsafe to use in noinstr code; use raw_atomic_add_negative() there.
1413  *
1414  * Return: @true if the resulting value of @v is negative, @false otherwise.
1415  */
1416 static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
1418 {
1419 	kcsan_mb();
1420 	instrument_atomic_read_write(v, sizeof(*v));
1421 	return raw_atomic_add_negative(i, v);
1422 }
1423 
1424 /**
1425  * atomic_add_negative_acquire() - atomic add and test if negative with acquire ordering
1426  * @i: int value to add
1427  * @v: pointer to atomic_t
1428  *
1429  * Atomically updates @v to (@v + @i) with acquire ordering.
1430  *
1431  * Unsafe to use in noinstr code; use raw_atomic_add_negative_acquire() there.
1432  *
1433  * Return: @true if the resulting value of @v is negative, @false otherwise.
1434  */
1435 static __always_inline bool
atomic_add_negative_acquire(int i, atomic_t *v)
1437 {
1438 	instrument_atomic_read_write(v, sizeof(*v));
1439 	return raw_atomic_add_negative_acquire(i, v);
1440 }
1441 
1442 /**
1443  * atomic_add_negative_release() - atomic add and test if negative with release ordering
1444  * @i: int value to add
1445  * @v: pointer to atomic_t
1446  *
1447  * Atomically updates @v to (@v + @i) with release ordering.
1448  *
1449  * Unsafe to use in noinstr code; use raw_atomic_add_negative_release() there.
1450  *
1451  * Return: @true if the resulting value of @v is negative, @false otherwise.
1452  */
1453 static __always_inline bool
atomic_add_negative_release(int i, atomic_t *v)
1455 {
1456 	kcsan_release();
1457 	instrument_atomic_read_write(v, sizeof(*v));
1458 	return raw_atomic_add_negative_release(i, v);
1459 }
1460 
1461 /**
1462  * atomic_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
1463  * @i: int value to add
1464  * @v: pointer to atomic_t
1465  *
1466  * Atomically updates @v to (@v + @i) with relaxed ordering.
1467  *
1468  * Unsafe to use in noinstr code; use raw_atomic_add_negative_relaxed() there.
1469  *
1470  * Return: @true if the resulting value of @v is negative, @false otherwise.
1471  */
1472 static __always_inline bool
atomic_add_negative_relaxed(int i, atomic_t *v)
1474 {
1475 	instrument_atomic_read_write(v, sizeof(*v));
1476 	return raw_atomic_add_negative_relaxed(i, v);
1477 }
1478 
1479 /**
1480  * atomic_fetch_add_unless() - atomic add unless value with full ordering
1481  * @v: pointer to atomic_t
1482  * @a: int value to add
1483  * @u: int value to compare with
1484  *
1485  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1486  * Otherwise, @v is not modified and relaxed ordering is provided.
1487  *
1488  * Unsafe to use in noinstr code; use raw_atomic_fetch_add_unless() there.
1489  *
1490  * Return: The original value of @v.
1491  */
1492 static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
1494 {
1495 	kcsan_mb();
1496 	instrument_atomic_read_write(v, sizeof(*v));
1497 	return raw_atomic_fetch_add_unless(v, a, u);
1498 }
1499 
1500 /**
1501  * atomic_add_unless() - atomic add unless value with full ordering
1502  * @v: pointer to atomic_t
1503  * @a: int value to add
1504  * @u: int value to compare with
1505  *
1506  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1507  * Otherwise, @v is not modified and relaxed ordering is provided.
1508  *
1509  * Unsafe to use in noinstr code; use raw_atomic_add_unless() there.
1510  *
1511  * Return: @true if @v was updated, @false otherwise.
1512  */
1513 static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
1515 {
1516 	kcsan_mb();
1517 	instrument_atomic_read_write(v, sizeof(*v));
1518 	return raw_atomic_add_unless(v, a, u);
1519 }
1520 
1521 /**
1522  * atomic_inc_not_zero() - atomic increment unless zero with full ordering
1523  * @v: pointer to atomic_t
1524  *
1525  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
1526  * Otherwise, @v is not modified and relaxed ordering is provided.
1527  *
1528  * Unsafe to use in noinstr code; use raw_atomic_inc_not_zero() there.
1529  *
1530  * Return: @true if @v was updated, @false otherwise.
1531  */
1532 static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
1534 {
1535 	kcsan_mb();
1536 	instrument_atomic_read_write(v, sizeof(*v));
1537 	return raw_atomic_inc_not_zero(v);
1538 }
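
/*
 * Illustrative sketch: atomic_inc_not_zero() is the usual "take a
 * reference only if the object is still live" step, e.g. for objects
 * looked up under RCU. 'obj', 'slot' and the 'refs' field are
 * hypothetical.
 *
 *	rcu_read_lock();
 *	obj = rcu_dereference(slot);
 *	if (obj && !atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;		// already being torn down
 *	rcu_read_unlock();
 */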
1539 
1540 /**
1541  * atomic_inc_unless_negative() - atomic increment unless negative with full ordering
1542  * @v: pointer to atomic_t
1543  *
1544  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
1545  * Otherwise, @v is not modified and relaxed ordering is provided.
1546  *
1547  * Unsafe to use in noinstr code; use raw_atomic_inc_unless_negative() there.
1548  *
1549  * Return: @true if @v was updated, @false otherwise.
1550  */
1551 static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
1553 {
1554 	kcsan_mb();
1555 	instrument_atomic_read_write(v, sizeof(*v));
1556 	return raw_atomic_inc_unless_negative(v);
1557 }
1558 
1559 /**
1560  * atomic_dec_unless_positive() - atomic decrement unless positive with full ordering
1561  * @v: pointer to atomic_t
1562  *
1563  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
1564  * Otherwise, @v is not modified and relaxed ordering is provided.
1565  *
1566  * Unsafe to use in noinstr code; use raw_atomic_dec_unless_positive() there.
1567  *
1568  * Return: @true if @v was updated, @false otherwise.
1569  */
1570 static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
1572 {
1573 	kcsan_mb();
1574 	instrument_atomic_read_write(v, sizeof(*v));
1575 	return raw_atomic_dec_unless_positive(v);
1576 }
1577 
1578 /**
1579  * atomic_dec_if_positive() - atomic decrement if positive with full ordering
1580  * @v: pointer to atomic_t
1581  *
1582  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
1583  * Otherwise, @v is not modified and relaxed ordering is provided.
1584  *
1585  * Unsafe to use in noinstr code; use raw_atomic_dec_if_positive() there.
1586  *
1587  * Return: The old value of (@v - 1), regardless of whether @v was updated.
1588  */
1589 static __always_inline int
atomic_dec_if_positive(atomic_t *v)
1591 {
1592 	kcsan_mb();
1593 	instrument_atomic_read_write(v, sizeof(*v));
1594 	return raw_atomic_dec_if_positive(v);
1595 }
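
/*
 * Illustrative sketch: atomic_dec_if_positive() returns the old value
 * minus one whether or not the store happened, so a negative result
 * means @v was not decremented. 'sem_count' is hypothetical.
 *
 *	if (atomic_dec_if_positive(&sem_count) < 0)
 *		return -EAGAIN;		// no units left, @v unchanged
 */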
1596 
1597 /**
1598  * atomic64_read() - atomic load with relaxed ordering
1599  * @v: pointer to atomic64_t
1600  *
1601  * Atomically loads the value of @v with relaxed ordering.
1602  *
1603  * Unsafe to use in noinstr code; use raw_atomic64_read() there.
1604  *
1605  * Return: The value loaded from @v.
1606  */
1607 static __always_inline s64
atomic64_read(const atomic64_t *v)
1609 {
1610 	instrument_atomic_read(v, sizeof(*v));
1611 	return raw_atomic64_read(v);
1612 }
1613 
1614 /**
1615  * atomic64_read_acquire() - atomic load with acquire ordering
1616  * @v: pointer to atomic64_t
1617  *
1618  * Atomically loads the value of @v with acquire ordering.
1619  *
1620  * Unsafe to use in noinstr code; use raw_atomic64_read_acquire() there.
1621  *
1622  * Return: The value loaded from @v.
1623  */
1624 static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
1626 {
1627 	instrument_atomic_read(v, sizeof(*v));
1628 	return raw_atomic64_read_acquire(v);
1629 }
1630 
1631 /**
1632  * atomic64_set() - atomic set with relaxed ordering
1633  * @v: pointer to atomic64_t
1634  * @i: s64 value to assign
1635  *
1636  * Atomically sets @v to @i with relaxed ordering.
1637  *
1638  * Unsafe to use in noinstr code; use raw_atomic64_set() there.
1639  *
1640  * Return: Nothing.
1641  */
1642 static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
1644 {
1645 	instrument_atomic_write(v, sizeof(*v));
1646 	raw_atomic64_set(v, i);
1647 }
1648 
1649 /**
1650  * atomic64_set_release() - atomic set with release ordering
1651  * @v: pointer to atomic64_t
1652  * @i: s64 value to assign
1653  *
1654  * Atomically sets @v to @i with release ordering.
1655  *
1656  * Unsafe to use in noinstr code; use raw_atomic64_set_release() there.
1657  *
1658  * Return: Nothing.
1659  */
1660 static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
1662 {
1663 	kcsan_release();
1664 	instrument_atomic_write(v, sizeof(*v));
1665 	raw_atomic64_set_release(v, i);
1666 }
1667 
1668 /**
1669  * atomic64_add() - atomic add with relaxed ordering
1670  * @i: s64 value to add
1671  * @v: pointer to atomic64_t
1672  *
1673  * Atomically updates @v to (@v + @i) with relaxed ordering.
1674  *
1675  * Unsafe to use in noinstr code; use raw_atomic64_add() there.
1676  *
1677  * Return: Nothing.
1678  */
1679 static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
1681 {
1682 	instrument_atomic_read_write(v, sizeof(*v));
1683 	raw_atomic64_add(i, v);
1684 }
1685 
1686 /**
1687  * atomic64_add_return() - atomic add with full ordering
1688  * @i: s64 value to add
1689  * @v: pointer to atomic64_t
1690  *
1691  * Atomically updates @v to (@v + @i) with full ordering.
1692  *
1693  * Unsafe to use in noinstr code; use raw_atomic64_add_return() there.
1694  *
1695  * Return: The updated value of @v.
1696  */
1697 static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
1699 {
1700 	kcsan_mb();
1701 	instrument_atomic_read_write(v, sizeof(*v));
1702 	return raw_atomic64_add_return(i, v);
1703 }
1704 
1705 /**
1706  * atomic64_add_return_acquire() - atomic add with acquire ordering
1707  * @i: s64 value to add
1708  * @v: pointer to atomic64_t
1709  *
1710  * Atomically updates @v to (@v + @i) with acquire ordering.
1711  *
1712  * Unsafe to use in noinstr code; use raw_atomic64_add_return_acquire() there.
1713  *
1714  * Return: The updated value of @v.
1715  */
1716 static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
1718 {
1719 	instrument_atomic_read_write(v, sizeof(*v));
1720 	return raw_atomic64_add_return_acquire(i, v);
1721 }
1722 
1723 /**
1724  * atomic64_add_return_release() - atomic add with release ordering
1725  * @i: s64 value to add
1726  * @v: pointer to atomic64_t
1727  *
1728  * Atomically updates @v to (@v + @i) with release ordering.
1729  *
1730  * Unsafe to use in noinstr code; use raw_atomic64_add_return_release() there.
1731  *
1732  * Return: The updated value of @v.
1733  */
1734 static __always_inline s64
1735 atomic64_add_return_release(s64 i, atomic64_t *v)
1736 {
1737 	kcsan_release();
1738 	instrument_atomic_read_write(v, sizeof(*v));
1739 	return raw_atomic64_add_return_release(i, v);
1740 }
1741 
1742 /**
1743  * atomic64_add_return_relaxed() - atomic add with relaxed ordering
1744  * @i: s64 value to add
1745  * @v: pointer to atomic64_t
1746  *
1747  * Atomically updates @v to (@v + @i) with relaxed ordering.
1748  *
1749  * Unsafe to use in noinstr code; use raw_atomic64_add_return_relaxed() there.
1750  *
1751  * Return: The updated value of @v.
1752  */
1753 static __always_inline s64
1754 atomic64_add_return_relaxed(s64 i, atomic64_t *v)
1755 {
1756 	instrument_atomic_read_write(v, sizeof(*v));
1757 	return raw_atomic64_add_return_relaxed(i, v);
1758 }
1759 
1760 /**
1761  * atomic64_fetch_add() - atomic add with full ordering
1762  * @i: s64 value to add
1763  * @v: pointer to atomic64_t
1764  *
1765  * Atomically updates @v to (@v + @i) with full ordering.
1766  *
1767  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add() there.
1768  *
1769  * Return: The original value of @v.
1770  */
1771 static __always_inline s64
1772 atomic64_fetch_add(s64 i, atomic64_t *v)
1773 {
1774 	kcsan_mb();
1775 	instrument_atomic_read_write(v, sizeof(*v));
1776 	return raw_atomic64_fetch_add(i, v);
1777 }
1778 
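/*
 * Illustrative usage sketch, not part of the generated API: a hypothetical
 * buffer cursor that reserves @len bytes; atomic64_fetch_add() returns the
 * pre-update value, which is the start offset of the reserved region.
 */
static __always_inline s64
example_reserve_offset(atomic64_t *cursor, s64 len)
{
	return atomic64_fetch_add(len, cursor);
}
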
1779 /**
1780  * atomic64_fetch_add_acquire() - atomic add with acquire ordering
1781  * @i: s64 value to add
1782  * @v: pointer to atomic64_t
1783  *
1784  * Atomically updates @v to (@v + @i) with acquire ordering.
1785  *
1786  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_acquire() there.
1787  *
1788  * Return: The original value of @v.
1789  */
1790 static __always_inline s64
1791 atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1792 {
1793 	instrument_atomic_read_write(v, sizeof(*v));
1794 	return raw_atomic64_fetch_add_acquire(i, v);
1795 }
1796 
1797 /**
1798  * atomic64_fetch_add_release() - atomic add with release ordering
1799  * @i: s64 value to add
1800  * @v: pointer to atomic64_t
1801  *
1802  * Atomically updates @v to (@v + @i) with release ordering.
1803  *
1804  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_release() there.
1805  *
1806  * Return: The original value of @v.
1807  */
1808 static __always_inline s64
1809 atomic64_fetch_add_release(s64 i, atomic64_t *v)
1810 {
1811 	kcsan_release();
1812 	instrument_atomic_read_write(v, sizeof(*v));
1813 	return raw_atomic64_fetch_add_release(i, v);
1814 }
1815 
1816 /**
1817  * atomic64_fetch_add_relaxed() - atomic add with relaxed ordering
1818  * @i: s64 value to add
1819  * @v: pointer to atomic64_t
1820  *
1821  * Atomically updates @v to (@v + @i) with relaxed ordering.
1822  *
1823  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_relaxed() there.
1824  *
1825  * Return: The original value of @v.
1826  */
1827 static __always_inline s64
1828 atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
1829 {
1830 	instrument_atomic_read_write(v, sizeof(*v));
1831 	return raw_atomic64_fetch_add_relaxed(i, v);
1832 }
1833 
1834 /**
1835  * atomic64_sub() - atomic subtract with relaxed ordering
1836  * @i: s64 value to subtract
1837  * @v: pointer to atomic64_t
1838  *
1839  * Atomically updates @v to (@v - @i) with relaxed ordering.
1840  *
1841  * Unsafe to use in noinstr code; use raw_atomic64_sub() there.
1842  *
1843  * Return: Nothing.
1844  */
1845 static __always_inline void
1846 atomic64_sub(s64 i, atomic64_t *v)
1847 {
1848 	instrument_atomic_read_write(v, sizeof(*v));
1849 	raw_atomic64_sub(i, v);
1850 }
1851 
1852 /**
1853  * atomic64_sub_return() - atomic subtract with full ordering
1854  * @i: s64 value to subtract
1855  * @v: pointer to atomic64_t
1856  *
1857  * Atomically updates @v to (@v - @i) with full ordering.
1858  *
1859  * Unsafe to use in noinstr code; use raw_atomic64_sub_return() there.
1860  *
1861  * Return: The updated value of @v.
1862  */
1863 static __always_inline s64
1864 atomic64_sub_return(s64 i, atomic64_t *v)
1865 {
1866 	kcsan_mb();
1867 	instrument_atomic_read_write(v, sizeof(*v));
1868 	return raw_atomic64_sub_return(i, v);
1869 }
1870 
1871 /**
1872  * atomic64_sub_return_acquire() - atomic subtract with acquire ordering
1873  * @i: s64 value to subtract
1874  * @v: pointer to atomic64_t
1875  *
1876  * Atomically updates @v to (@v - @i) with acquire ordering.
1877  *
1878  * Unsafe to use in noinstr code; use raw_atomic64_sub_return_acquire() there.
1879  *
1880  * Return: The updated value of @v.
1881  */
1882 static __always_inline s64
1883 atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1884 {
1885 	instrument_atomic_read_write(v, sizeof(*v));
1886 	return raw_atomic64_sub_return_acquire(i, v);
1887 }
1888 
1889 /**
1890  * atomic64_sub_return_release() - atomic subtract with release ordering
1891  * @i: s64 value to subtract
1892  * @v: pointer to atomic64_t
1893  *
1894  * Atomically updates @v to (@v - @i) with release ordering.
1895  *
1896  * Unsafe to use in noinstr code; use raw_atomic64_sub_return_release() there.
1897  *
1898  * Return: The updated value of @v.
1899  */
1900 static __always_inline s64
1901 atomic64_sub_return_release(s64 i, atomic64_t *v)
1902 {
1903 	kcsan_release();
1904 	instrument_atomic_read_write(v, sizeof(*v));
1905 	return raw_atomic64_sub_return_release(i, v);
1906 }
1907 
1908 /**
1909  * atomic64_sub_return_relaxed() - atomic subtract with relaxed ordering
1910  * @i: s64 value to subtract
1911  * @v: pointer to atomic64_t
1912  *
1913  * Atomically updates @v to (@v - @i) with relaxed ordering.
1914  *
1915  * Unsafe to use in noinstr code; use raw_atomic64_sub_return_relaxed() there.
1916  *
1917  * Return: The updated value of @v.
1918  */
1919 static __always_inline s64
1920 atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
1921 {
1922 	instrument_atomic_read_write(v, sizeof(*v));
1923 	return raw_atomic64_sub_return_relaxed(i, v);
1924 }
1925 
1926 /**
1927  * atomic64_fetch_sub() - atomic subtract with full ordering
1928  * @i: s64 value to subtract
1929  * @v: pointer to atomic64_t
1930  *
1931  * Atomically updates @v to (@v - @i) with full ordering.
1932  *
1933  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub() there.
1934  *
1935  * Return: The original value of @v.
1936  */
1937 static __always_inline s64
1938 atomic64_fetch_sub(s64 i, atomic64_t *v)
1939 {
1940 	kcsan_mb();
1941 	instrument_atomic_read_write(v, sizeof(*v));
1942 	return raw_atomic64_fetch_sub(i, v);
1943 }
1944 
1945 /**
1946  * atomic64_fetch_sub_acquire() - atomic subtract with acquire ordering
1947  * @i: s64 value to subtract
1948  * @v: pointer to atomic64_t
1949  *
1950  * Atomically updates @v to (@v - @i) with acquire ordering.
1951  *
1952  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_acquire() there.
1953  *
1954  * Return: The original value of @v.
1955  */
1956 static __always_inline s64
1957 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1958 {
1959 	instrument_atomic_read_write(v, sizeof(*v));
1960 	return raw_atomic64_fetch_sub_acquire(i, v);
1961 }
1962 
1963 /**
1964  * atomic64_fetch_sub_release() - atomic subtract with release ordering
1965  * @i: s64 value to subtract
1966  * @v: pointer to atomic64_t
1967  *
1968  * Atomically updates @v to (@v - @i) with release ordering.
1969  *
1970  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_release() there.
1971  *
1972  * Return: The original value of @v.
1973  */
1974 static __always_inline s64
1975 atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1976 {
1977 	kcsan_release();
1978 	instrument_atomic_read_write(v, sizeof(*v));
1979 	return raw_atomic64_fetch_sub_release(i, v);
1980 }
1981 
1982 /**
1983  * atomic64_fetch_sub_relaxed() - atomic subtract with relaxed ordering
1984  * @i: s64 value to subtract
1985  * @v: pointer to atomic64_t
1986  *
1987  * Atomically updates @v to (@v - @i) with relaxed ordering.
1988  *
1989  * Unsafe to use in noinstr code; use raw_atomic64_fetch_sub_relaxed() there.
1990  *
1991  * Return: The original value of @v.
1992  */
1993 static __always_inline s64
1994 atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
1995 {
1996 	instrument_atomic_read_write(v, sizeof(*v));
1997 	return raw_atomic64_fetch_sub_relaxed(i, v);
1998 }
1999 
2000 /**
2001  * atomic64_inc() - atomic increment with relaxed ordering
2002  * @v: pointer to atomic64_t
2003  *
2004  * Atomically updates @v to (@v + 1) with relaxed ordering.
2005  *
2006  * Unsafe to use in noinstr code; use raw_atomic64_inc() there.
2007  *
2008  * Return: Nothing.
2009  */
2010 static __always_inline void
2011 atomic64_inc(atomic64_t *v)
2012 {
2013 	instrument_atomic_read_write(v, sizeof(*v));
2014 	raw_atomic64_inc(v);
2015 }
2016 
2017 /**
2018  * atomic64_inc_return() - atomic increment with full ordering
2019  * @v: pointer to atomic64_t
2020  *
2021  * Atomically updates @v to (@v + 1) with full ordering.
2022  *
2023  * Unsafe to use in noinstr code; use raw_atomic64_inc_return() there.
2024  *
2025  * Return: The updated value of @v.
2026  */
2027 static __always_inline s64
2028 atomic64_inc_return(atomic64_t *v)
2029 {
2030 	kcsan_mb();
2031 	instrument_atomic_read_write(v, sizeof(*v));
2032 	return raw_atomic64_inc_return(v);
2033 }
2034 
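/*
 * Illustrative usage sketch, not part of the generated API: a hypothetical
 * lockless ID allocator; atomic64_inc_return() hands every caller a
 * distinct, fully ordered value.
 */
static __always_inline s64
example_next_id(atomic64_t *last_id)
{
	return atomic64_inc_return(last_id);
}
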
2035 /**
2036  * atomic64_inc_return_acquire() - atomic increment with acquire ordering
2037  * @v: pointer to atomic64_t
2038  *
2039  * Atomically updates @v to (@v + 1) with acquire ordering.
2040  *
2041  * Unsafe to use in noinstr code; use raw_atomic64_inc_return_acquire() there.
2042  *
2043  * Return: The updated value of @v.
2044  */
2045 static __always_inline s64
2046 atomic64_inc_return_acquire(atomic64_t *v)
2047 {
2048 	instrument_atomic_read_write(v, sizeof(*v));
2049 	return raw_atomic64_inc_return_acquire(v);
2050 }
2051 
2052 /**
2053  * atomic64_inc_return_release() - atomic increment with release ordering
2054  * @v: pointer to atomic64_t
2055  *
2056  * Atomically updates @v to (@v + 1) with release ordering.
2057  *
2058  * Unsafe to use in noinstr code; use raw_atomic64_inc_return_release() there.
2059  *
2060  * Return: The updated value of @v.
2061  */
2062 static __always_inline s64
2063 atomic64_inc_return_release(atomic64_t *v)
2064 {
2065 	kcsan_release();
2066 	instrument_atomic_read_write(v, sizeof(*v));
2067 	return raw_atomic64_inc_return_release(v);
2068 }
2069 
2070 /**
2071  * atomic64_inc_return_relaxed() - atomic increment with relaxed ordering
2072  * @v: pointer to atomic64_t
2073  *
2074  * Atomically updates @v to (@v + 1) with relaxed ordering.
2075  *
2076  * Unsafe to use in noinstr code; use raw_atomic64_inc_return_relaxed() there.
2077  *
2078  * Return: The updated value of @v.
2079  */
2080 static __always_inline s64
2081 atomic64_inc_return_relaxed(atomic64_t *v)
2082 {
2083 	instrument_atomic_read_write(v, sizeof(*v));
2084 	return raw_atomic64_inc_return_relaxed(v);
2085 }
2086 
2087 /**
2088  * atomic64_fetch_inc() - atomic increment with full ordering
2089  * @v: pointer to atomic64_t
2090  *
2091  * Atomically updates @v to (@v + 1) with full ordering.
2092  *
2093  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc() there.
2094  *
2095  * Return: The original value of @v.
2096  */
2097 static __always_inline s64
2098 atomic64_fetch_inc(atomic64_t *v)
2099 {
2100 	kcsan_mb();
2101 	instrument_atomic_read_write(v, sizeof(*v));
2102 	return raw_atomic64_fetch_inc(v);
2103 }
2104 
2105 /**
2106  * atomic64_fetch_inc_acquire() - atomic increment with acquire ordering
2107  * @v: pointer to atomic64_t
2108  *
2109  * Atomically updates @v to (@v + 1) with acquire ordering.
2110  *
2111  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_acquire() there.
2112  *
2113  * Return: The original value of @v.
2114  */
2115 static __always_inline s64
2116 atomic64_fetch_inc_acquire(atomic64_t *v)
2117 {
2118 	instrument_atomic_read_write(v, sizeof(*v));
2119 	return raw_atomic64_fetch_inc_acquire(v);
2120 }
2121 
2122 /**
2123  * atomic64_fetch_inc_release() - atomic increment with release ordering
2124  * @v: pointer to atomic64_t
2125  *
2126  * Atomically updates @v to (@v + 1) with release ordering.
2127  *
2128  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_release() there.
2129  *
2130  * Return: The original value of @v.
2131  */
2132 static __always_inline s64
2133 atomic64_fetch_inc_release(atomic64_t *v)
2134 {
2135 	kcsan_release();
2136 	instrument_atomic_read_write(v, sizeof(*v));
2137 	return raw_atomic64_fetch_inc_release(v);
2138 }
2139 
2140 /**
2141  * atomic64_fetch_inc_relaxed() - atomic increment with relaxed ordering
2142  * @v: pointer to atomic64_t
2143  *
2144  * Atomically updates @v to (@v + 1) with relaxed ordering.
2145  *
2146  * Unsafe to use in noinstr code; use raw_atomic64_fetch_inc_relaxed() there.
2147  *
2148  * Return: The original value of @v.
2149  */
2150 static __always_inline s64
2151 atomic64_fetch_inc_relaxed(atomic64_t *v)
2152 {
2153 	instrument_atomic_read_write(v, sizeof(*v));
2154 	return raw_atomic64_fetch_inc_relaxed(v);
2155 }
2156 
2157 /**
2158  * atomic64_dec() - atomic decrement with relaxed ordering
2159  * @v: pointer to atomic64_t
2160  *
2161  * Atomically updates @v to (@v - 1) with relaxed ordering.
2162  *
2163  * Unsafe to use in noinstr code; use raw_atomic64_dec() there.
2164  *
2165  * Return: Nothing.
2166  */
2167 static __always_inline void
2168 atomic64_dec(atomic64_t *v)
2169 {
2170 	instrument_atomic_read_write(v, sizeof(*v));
2171 	raw_atomic64_dec(v);
2172 }
2173 
2174 /**
2175  * atomic64_dec_return() - atomic decrement with full ordering
2176  * @v: pointer to atomic64_t
2177  *
2178  * Atomically updates @v to (@v - 1) with full ordering.
2179  *
2180  * Unsafe to use in noinstr code; use raw_atomic64_dec_return() there.
2181  *
2182  * Return: The updated value of @v.
2183  */
2184 static __always_inline s64
2185 atomic64_dec_return(atomic64_t *v)
2186 {
2187 	kcsan_mb();
2188 	instrument_atomic_read_write(v, sizeof(*v));
2189 	return raw_atomic64_dec_return(v);
2190 }
2191 
2192 /**
2193  * atomic64_dec_return_acquire() - atomic decrement with acquire ordering
2194  * @v: pointer to atomic64_t
2195  *
2196  * Atomically updates @v to (@v - 1) with acquire ordering.
2197  *
2198  * Unsafe to use in noinstr code; use raw_atomic64_dec_return_acquire() there.
2199  *
2200  * Return: The updated value of @v.
2201  */
2202 static __always_inline s64
2203 atomic64_dec_return_acquire(atomic64_t *v)
2204 {
2205 	instrument_atomic_read_write(v, sizeof(*v));
2206 	return raw_atomic64_dec_return_acquire(v);
2207 }
2208 
2209 /**
2210  * atomic64_dec_return_release() - atomic decrement with release ordering
2211  * @v: pointer to atomic64_t
2212  *
2213  * Atomically updates @v to (@v - 1) with release ordering.
2214  *
2215  * Unsafe to use in noinstr code; use raw_atomic64_dec_return_release() there.
2216  *
2217  * Return: The updated value of @v.
2218  */
2219 static __always_inline s64
2220 atomic64_dec_return_release(atomic64_t *v)
2221 {
2222 	kcsan_release();
2223 	instrument_atomic_read_write(v, sizeof(*v));
2224 	return raw_atomic64_dec_return_release(v);
2225 }
2226 
2227 /**
2228  * atomic64_dec_return_relaxed() - atomic decrement with relaxed ordering
2229  * @v: pointer to atomic64_t
2230  *
2231  * Atomically updates @v to (@v - 1) with relaxed ordering.
2232  *
2233  * Unsafe to use in noinstr code; use raw_atomic64_dec_return_relaxed() there.
2234  *
2235  * Return: The updated value of @v.
2236  */
2237 static __always_inline s64
2238 atomic64_dec_return_relaxed(atomic64_t *v)
2239 {
2240 	instrument_atomic_read_write(v, sizeof(*v));
2241 	return raw_atomic64_dec_return_relaxed(v);
2242 }
2243 
2244 /**
2245  * atomic64_fetch_dec() - atomic decrement with full ordering
2246  * @v: pointer to atomic64_t
2247  *
2248  * Atomically updates @v to (@v - 1) with full ordering.
2249  *
2250  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec() there.
2251  *
2252  * Return: The original value of @v.
2253  */
2254 static __always_inline s64
2255 atomic64_fetch_dec(atomic64_t *v)
2256 {
2257 	kcsan_mb();
2258 	instrument_atomic_read_write(v, sizeof(*v));
2259 	return raw_atomic64_fetch_dec(v);
2260 }
2261 
2262 /**
2263  * atomic64_fetch_dec_acquire() - atomic decrement with acquire ordering
2264  * @v: pointer to atomic64_t
2265  *
2266  * Atomically updates @v to (@v - 1) with acquire ordering.
2267  *
2268  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_acquire() there.
2269  *
2270  * Return: The original value of @v.
2271  */
2272 static __always_inline s64
2273 atomic64_fetch_dec_acquire(atomic64_t *v)
2274 {
2275 	instrument_atomic_read_write(v, sizeof(*v));
2276 	return raw_atomic64_fetch_dec_acquire(v);
2277 }
2278 
2279 /**
2280  * atomic64_fetch_dec_release() - atomic decrement with release ordering
2281  * @v: pointer to atomic64_t
2282  *
2283  * Atomically updates @v to (@v - 1) with release ordering.
2284  *
2285  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_release() there.
2286  *
2287  * Return: The original value of @v.
2288  */
2289 static __always_inline s64
2290 atomic64_fetch_dec_release(atomic64_t *v)
2291 {
2292 	kcsan_release();
2293 	instrument_atomic_read_write(v, sizeof(*v));
2294 	return raw_atomic64_fetch_dec_release(v);
2295 }
2296 
2297 /**
2298  * atomic64_fetch_dec_relaxed() - atomic decrement with relaxed ordering
2299  * @v: pointer to atomic64_t
2300  *
2301  * Atomically updates @v to (@v - 1) with relaxed ordering.
2302  *
2303  * Unsafe to use in noinstr code; use raw_atomic64_fetch_dec_relaxed() there.
2304  *
2305  * Return: The original value of @v.
2306  */
2307 static __always_inline s64
2308 atomic64_fetch_dec_relaxed(atomic64_t *v)
2309 {
2310 	instrument_atomic_read_write(v, sizeof(*v));
2311 	return raw_atomic64_fetch_dec_relaxed(v);
2312 }
2313 
2314 /**
2315  * atomic64_and() - atomic bitwise AND with relaxed ordering
2316  * @i: s64 value
2317  * @v: pointer to atomic64_t
2318  *
2319  * Atomically updates @v to (@v & @i) with relaxed ordering.
2320  *
2321  * Unsafe to use in noinstr code; use raw_atomic64_and() there.
2322  *
2323  * Return: Nothing.
2324  */
2325 static __always_inline void
2326 atomic64_and(s64 i, atomic64_t *v)
2327 {
2328 	instrument_atomic_read_write(v, sizeof(*v));
2329 	raw_atomic64_and(i, v);
2330 }
2331 
2332 /**
2333  * atomic64_fetch_and() - atomic bitwise AND with full ordering
2334  * @i: s64 value
2335  * @v: pointer to atomic64_t
2336  *
2337  * Atomically updates @v to (@v & @i) with full ordering.
2338  *
2339  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and() there.
2340  *
2341  * Return: The original value of @v.
2342  */
2343 static __always_inline s64
2344 atomic64_fetch_and(s64 i, atomic64_t *v)
2345 {
2346 	kcsan_mb();
2347 	instrument_atomic_read_write(v, sizeof(*v));
2348 	return raw_atomic64_fetch_and(i, v);
2349 }
2350 
2351 /**
2352  * atomic64_fetch_and_acquire() - atomic bitwise AND with acquire ordering
2353  * @i: s64 value
2354  * @v: pointer to atomic64_t
2355  *
2356  * Atomically updates @v to (@v & @i) with acquire ordering.
2357  *
2358  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_acquire() there.
2359  *
2360  * Return: The original value of @v.
2361  */
2362 static __always_inline s64
2363 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
2364 {
2365 	instrument_atomic_read_write(v, sizeof(*v));
2366 	return raw_atomic64_fetch_and_acquire(i, v);
2367 }
2368 
2369 /**
2370  * atomic64_fetch_and_release() - atomic bitwise AND with release ordering
2371  * @i: s64 value
2372  * @v: pointer to atomic64_t
2373  *
2374  * Atomically updates @v to (@v & @i) with release ordering.
2375  *
2376  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_release() there.
2377  *
2378  * Return: The original value of @v.
2379  */
2380 static __always_inline s64
2381 atomic64_fetch_and_release(s64 i, atomic64_t *v)
2382 {
2383 	kcsan_release();
2384 	instrument_atomic_read_write(v, sizeof(*v));
2385 	return raw_atomic64_fetch_and_release(i, v);
2386 }
2387 
2388 /**
2389  * atomic64_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
2390  * @i: s64 value
2391  * @v: pointer to atomic64_t
2392  *
2393  * Atomically updates @v to (@v & @i) with relaxed ordering.
2394  *
2395  * Unsafe to use in noinstr code; use raw_atomic64_fetch_and_relaxed() there.
2396  *
2397  * Return: The original value of @v.
2398  */
2399 static __always_inline s64
2400 atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
2401 {
2402 	instrument_atomic_read_write(v, sizeof(*v));
2403 	return raw_atomic64_fetch_and_relaxed(i, v);
2404 }
2405 
2406 /**
2407  * atomic64_andnot() - atomic bitwise AND NOT with relaxed ordering
2408  * @i: s64 value
2409  * @v: pointer to atomic64_t
2410  *
2411  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
2412  *
2413  * Unsafe to use in noinstr code; use raw_atomic64_andnot() there.
2414  *
2415  * Return: Nothing.
2416  */
2417 static __always_inline void
2418 atomic64_andnot(s64 i, atomic64_t *v)
2419 {
2420 	instrument_atomic_read_write(v, sizeof(*v));
2421 	raw_atomic64_andnot(i, v);
2422 }
2423 
2424 /**
2425  * atomic64_fetch_andnot() - atomic bitwise AND NOT with full ordering
2426  * @i: s64 value
2427  * @v: pointer to atomic64_t
2428  *
2429  * Atomically updates @v to (@v & ~@i) with full ordering.
2430  *
2431  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot() there.
2432  *
2433  * Return: The original value of @v.
2434  */
2435 static __always_inline s64
2436 atomic64_fetch_andnot(s64 i, atomic64_t *v)
2437 {
2438 	kcsan_mb();
2439 	instrument_atomic_read_write(v, sizeof(*v));
2440 	return raw_atomic64_fetch_andnot(i, v);
2441 }
2442 
2443 /**
2444  * atomic64_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
2445  * @i: s64 value
2446  * @v: pointer to atomic64_t
2447  *
2448  * Atomically updates @v to (@v & ~@i) with acquire ordering.
2449  *
2450  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_acquire() there.
2451  *
2452  * Return: The original value of @v.
2453  */
2454 static __always_inline s64
2455 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
2456 {
2457 	instrument_atomic_read_write(v, sizeof(*v));
2458 	return raw_atomic64_fetch_andnot_acquire(i, v);
2459 }
2460 
2461 /**
2462  * atomic64_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
2463  * @i: s64 value
2464  * @v: pointer to atomic64_t
2465  *
2466  * Atomically updates @v to (@v & ~@i) with release ordering.
2467  *
2468  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_release() there.
2469  *
2470  * Return: The original value of @v.
2471  */
2472 static __always_inline s64
2473 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2474 {
2475 	kcsan_release();
2476 	instrument_atomic_read_write(v, sizeof(*v));
2477 	return raw_atomic64_fetch_andnot_release(i, v);
2478 }
2479 
2480 /**
2481  * atomic64_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
2482  * @i: s64 value
2483  * @v: pointer to atomic64_t
2484  *
2485  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
2486  *
2487  * Unsafe to use in noinstr code; use raw_atomic64_fetch_andnot_relaxed() there.
2488  *
2489  * Return: The original value of @v.
2490  */
2491 static __always_inline s64
2492 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
2493 {
2494 	instrument_atomic_read_write(v, sizeof(*v));
2495 	return raw_atomic64_fetch_andnot_relaxed(i, v);
2496 }
2497 
2498 /**
2499  * atomic64_or() - atomic bitwise OR with relaxed ordering
2500  * @i: s64 value
2501  * @v: pointer to atomic64_t
2502  *
2503  * Atomically updates @v to (@v | @i) with relaxed ordering.
2504  *
2505  * Unsafe to use in noinstr code; use raw_atomic64_or() there.
2506  *
2507  * Return: Nothing.
2508  */
2509 static __always_inline void
2510 atomic64_or(s64 i, atomic64_t *v)
2511 {
2512 	instrument_atomic_read_write(v, sizeof(*v));
2513 	raw_atomic64_or(i, v);
2514 }
2515 
2516 /**
2517  * atomic64_fetch_or() - atomic bitwise OR with full ordering
2518  * @i: s64 value
2519  * @v: pointer to atomic64_t
2520  *
2521  * Atomically updates @v to (@v | @i) with full ordering.
2522  *
2523  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or() there.
2524  *
2525  * Return: The original value of @v.
2526  */
2527 static __always_inline s64
2528 atomic64_fetch_or(s64 i, atomic64_t *v)
2529 {
2530 	kcsan_mb();
2531 	instrument_atomic_read_write(v, sizeof(*v));
2532 	return raw_atomic64_fetch_or(i, v);
2533 }
2534 
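/*
 * Illustrative usage sketch, not part of the generated API: a hypothetical
 * one-shot flag; since atomic64_fetch_or() returns the old mask, the caller
 * can tell whether it was the one that set @flag_bit.
 */
static __always_inline bool
example_set_flag_once(atomic64_t *flags, s64 flag_bit)
{
	/* True only for the caller that flips the bit from 0 to 1. */
	return !(atomic64_fetch_or(flag_bit, flags) & flag_bit);
}
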
2535 /**
2536  * atomic64_fetch_or_acquire() - atomic bitwise OR with acquire ordering
2537  * @i: s64 value
2538  * @v: pointer to atomic64_t
2539  *
2540  * Atomically updates @v to (@v | @i) with acquire ordering.
2541  *
2542  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_acquire() there.
2543  *
2544  * Return: The original value of @v.
2545  */
2546 static __always_inline s64
2547 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
2548 {
2549 	instrument_atomic_read_write(v, sizeof(*v));
2550 	return raw_atomic64_fetch_or_acquire(i, v);
2551 }
2552 
2553 /**
2554  * atomic64_fetch_or_release() - atomic bitwise OR with release ordering
2555  * @i: s64 value
2556  * @v: pointer to atomic64_t
2557  *
2558  * Atomically updates @v to (@v | @i) with release ordering.
2559  *
2560  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_release() there.
2561  *
2562  * Return: The original value of @v.
2563  */
2564 static __always_inline s64
2565 atomic64_fetch_or_release(s64 i, atomic64_t *v)
2566 {
2567 	kcsan_release();
2568 	instrument_atomic_read_write(v, sizeof(*v));
2569 	return raw_atomic64_fetch_or_release(i, v);
2570 }
2571 
2572 /**
2573  * atomic64_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
2574  * @i: s64 value
2575  * @v: pointer to atomic64_t
2576  *
2577  * Atomically updates @v to (@v | @i) with relaxed ordering.
2578  *
2579  * Unsafe to use in noinstr code; use raw_atomic64_fetch_or_relaxed() there.
2580  *
2581  * Return: The original value of @v.
2582  */
2583 static __always_inline s64
2584 atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
2585 {
2586 	instrument_atomic_read_write(v, sizeof(*v));
2587 	return raw_atomic64_fetch_or_relaxed(i, v);
2588 }
2589 
2590 /**
2591  * atomic64_xor() - atomic bitwise XOR with relaxed ordering
2592  * @i: s64 value
2593  * @v: pointer to atomic64_t
2594  *
2595  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
2596  *
2597  * Unsafe to use in noinstr code; use raw_atomic64_xor() there.
2598  *
2599  * Return: Nothing.
2600  */
2601 static __always_inline void
2602 atomic64_xor(s64 i, atomic64_t *v)
2603 {
2604 	instrument_atomic_read_write(v, sizeof(*v));
2605 	raw_atomic64_xor(i, v);
2606 }
2607 
2608 /**
2609  * atomic64_fetch_xor() - atomic bitwise XOR with full ordering
2610  * @i: s64 value
2611  * @v: pointer to atomic64_t
2612  *
2613  * Atomically updates @v to (@v ^ @i) with full ordering.
2614  *
2615  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor() there.
2616  *
2617  * Return: The original value of @v.
2618  */
2619 static __always_inline s64
2620 atomic64_fetch_xor(s64 i, atomic64_t *v)
2621 {
2622 	kcsan_mb();
2623 	instrument_atomic_read_write(v, sizeof(*v));
2624 	return raw_atomic64_fetch_xor(i, v);
2625 }
2626 
2627 /**
2628  * atomic64_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
2629  * @i: s64 value
2630  * @v: pointer to atomic64_t
2631  *
2632  * Atomically updates @v to (@v ^ @i) with acquire ordering.
2633  *
2634  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_acquire() there.
2635  *
2636  * Return: The original value of @v.
2637  */
2638 static __always_inline s64
2639 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
2640 {
2641 	instrument_atomic_read_write(v, sizeof(*v));
2642 	return raw_atomic64_fetch_xor_acquire(i, v);
2643 }
2644 
2645 /**
2646  * atomic64_fetch_xor_release() - atomic bitwise XOR with release ordering
2647  * @i: s64 value
2648  * @v: pointer to atomic64_t
2649  *
2650  * Atomically updates @v to (@v ^ @i) with release ordering.
2651  *
2652  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_release() there.
2653  *
2654  * Return: The original value of @v.
2655  */
2656 static __always_inline s64
2657 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
2658 {
2659 	kcsan_release();
2660 	instrument_atomic_read_write(v, sizeof(*v));
2661 	return raw_atomic64_fetch_xor_release(i, v);
2662 }
2663 
2664 /**
2665  * atomic64_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
2666  * @i: s64 value
2667  * @v: pointer to atomic64_t
2668  *
2669  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
2670  *
2671  * Unsafe to use in noinstr code; use raw_atomic64_fetch_xor_relaxed() there.
2672  *
2673  * Return: The original value of @v.
2674  */
2675 static __always_inline s64
2676 atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
2677 {
2678 	instrument_atomic_read_write(v, sizeof(*v));
2679 	return raw_atomic64_fetch_xor_relaxed(i, v);
2680 }
2681 
2682 /**
2683  * atomic64_xchg() - atomic exchange with full ordering
2684  * @v: pointer to atomic64_t
2685  * @new: s64 value to assign
2686  *
2687  * Atomically updates @v to @new with full ordering.
2688  *
2689  * Unsafe to use in noinstr code; use raw_atomic64_xchg() there.
2690  *
2691  * Return: The original value of @v.
2692  */
2693 static __always_inline s64
2694 atomic64_xchg(atomic64_t *v, s64 new)
2695 {
2696 	kcsan_mb();
2697 	instrument_atomic_read_write(v, sizeof(*v));
2698 	return raw_atomic64_xchg(v, new);
2699 }
2700 
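/*
 * Illustrative usage sketch, not part of the generated API: a hypothetical
 * drain helper that atomically takes the accumulated count and resets it to
 * zero in a single fully ordered step via atomic64_xchg().
 */
static __always_inline s64
example_drain_pending(atomic64_t *pending)
{
	return atomic64_xchg(pending, 0);
}
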
2701 /**
2702  * atomic64_xchg_acquire() - atomic exchange with acquire ordering
2703  * @v: pointer to atomic64_t
2704  * @new: s64 value to assign
2705  *
2706  * Atomically updates @v to @new with acquire ordering.
2707  *
2708  * Unsafe to use in noinstr code; use raw_atomic64_xchg_acquire() there.
2709  *
2710  * Return: The original value of @v.
2711  */
2712 static __always_inline s64
2713 atomic64_xchg_acquire(atomic64_t *v, s64 new)
2714 {
2715 	instrument_atomic_read_write(v, sizeof(*v));
2716 	return raw_atomic64_xchg_acquire(v, new);
2717 }
2718 
2719 /**
2720  * atomic64_xchg_release() - atomic exchange with release ordering
2721  * @v: pointer to atomic64_t
2722  * @new: s64 value to assign
2723  *
2724  * Atomically updates @v to @new with release ordering.
2725  *
2726  * Unsafe to use in noinstr code; use raw_atomic64_xchg_release() there.
2727  *
2728  * Return: The original value of @v.
2729  */
2730 static __always_inline s64
2731 atomic64_xchg_release(atomic64_t *v, s64 new)
2732 {
2733 	kcsan_release();
2734 	instrument_atomic_read_write(v, sizeof(*v));
2735 	return raw_atomic64_xchg_release(v, new);
2736 }
2737 
2738 /**
2739  * atomic64_xchg_relaxed() - atomic exchange with relaxed ordering
2740  * @v: pointer to atomic64_t
2741  * @new: s64 value to assign
2742  *
2743  * Atomically updates @v to @new with relaxed ordering.
2744  *
2745  * Unsafe to use in noinstr code; use raw_atomic64_xchg_relaxed() there.
2746  *
2747  * Return: The original value of @v.
2748  */
2749 static __always_inline s64
2750 atomic64_xchg_relaxed(atomic64_t *v, s64 new)
2751 {
2752 	instrument_atomic_read_write(v, sizeof(*v));
2753 	return raw_atomic64_xchg_relaxed(v, new);
2754 }
2755 
2756 /**
2757  * atomic64_cmpxchg() - atomic compare and exchange with full ordering
2758  * @v: pointer to atomic64_t
2759  * @old: s64 value to compare with
2760  * @new: s64 value to assign
2761  *
2762  * If (@v == @old), atomically updates @v to @new with full ordering.
2763  * Otherwise, @v is not modified and relaxed ordering is provided.
2764  *
2765  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg() there.
2766  *
2767  * Return: The original value of @v.
2768  */
2769 static __always_inline s64
2770 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2771 {
2772 	kcsan_mb();
2773 	instrument_atomic_read_write(v, sizeof(*v));
2774 	return raw_atomic64_cmpxchg(v, old, new);
2775 }
2776 
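/*
 * Illustrative usage sketch, not part of the generated API: recording a
 * running maximum with a classic atomic64_cmpxchg() retry loop; on
 * contention the returned old value is reused for the next attempt.
 */
static __always_inline void
example_track_max(atomic64_t *max, s64 sample)
{
	s64 old = atomic64_read(max);

	while (old < sample) {
		s64 seen = atomic64_cmpxchg(max, old, sample);

		if (seen == old)
			break;		/* @sample installed. */
		old = seen;		/* Lost the race; re-check. */
	}
}
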
2777 /**
2778  * atomic64_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
2779  * @v: pointer to atomic64_t
2780  * @old: s64 value to compare with
2781  * @new: s64 value to assign
2782  *
2783  * If (@v == @old), atomically updates @v to @new with acquire ordering.
2784  * Otherwise, @v is not modified and relaxed ordering is provided.
2785  *
2786  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_acquire() there.
2787  *
2788  * Return: The original value of @v.
2789  */
2790 static __always_inline s64
2791 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
2792 {
2793 	instrument_atomic_read_write(v, sizeof(*v));
2794 	return raw_atomic64_cmpxchg_acquire(v, old, new);
2795 }
2796 
2797 /**
2798  * atomic64_cmpxchg_release() - atomic compare and exchange with release ordering
2799  * @v: pointer to atomic64_t
2800  * @old: s64 value to compare with
2801  * @new: s64 value to assign
2802  *
2803  * If (@v == @old), atomically updates @v to @new with release ordering.
2804  * Otherwise, @v is not modified and relaxed ordering is provided.
2805  *
2806  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_release() there.
2807  *
2808  * Return: The original value of @v.
2809  */
2810 static __always_inline s64
2811 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
2812 {
2813 	kcsan_release();
2814 	instrument_atomic_read_write(v, sizeof(*v));
2815 	return raw_atomic64_cmpxchg_release(v, old, new);
2816 }
2817 
2818 /**
2819  * atomic64_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
2820  * @v: pointer to atomic64_t
2821  * @old: s64 value to compare with
2822  * @new: s64 value to assign
2823  *
2824  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
2825  * Otherwise, @v is not modified and relaxed ordering is provided.
2826  *
2827  * Unsafe to use in noinstr code; use raw_atomic64_cmpxchg_relaxed() there.
2828  *
2829  * Return: The original value of @v.
2830  */
2831 static __always_inline s64
2832 atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
2833 {
2834 	instrument_atomic_read_write(v, sizeof(*v));
2835 	return raw_atomic64_cmpxchg_relaxed(v, old, new);
2836 }
2837 
2838 /**
2839  * atomic64_try_cmpxchg() - atomic compare and exchange with full ordering
2840  * @v: pointer to atomic64_t
2841  * @old: pointer to s64 value to compare with
2842  * @new: s64 value to assign
2843  *
2844  * If (@v == @old), atomically updates @v to @new with full ordering.
2845  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2846  * and relaxed ordering is provided.
2847  *
2848  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg() there.
2849  *
2850  * Return: @true if the exchange occurred, @false otherwise.
2851  */
2852 static __always_inline bool
2853 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2854 {
2855 	kcsan_mb();
2856 	instrument_atomic_read_write(v, sizeof(*v));
2857 	instrument_atomic_read_write(old, sizeof(*old));
2858 	return raw_atomic64_try_cmpxchg(v, old, new);
2859 }
2860 
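/*
 * Illustrative usage sketch, not part of the generated API: the same
 * maximum tracking written with atomic64_try_cmpxchg(), which updates the
 * local copy of the old value in place on failure and is the form usually
 * preferred for compare-and-exchange loops.
 */
static __always_inline void
example_track_max_try(atomic64_t *max, s64 sample)
{
	s64 old = atomic64_read(max);

	do {
		if (old >= sample)
			return;		/* Current maximum is already larger. */
	} while (!atomic64_try_cmpxchg(max, &old, sample));
}
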
2861 /**
2862  * atomic64_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
2863  * @v: pointer to atomic64_t
2864  * @old: pointer to s64 value to compare with
2865  * @new: s64 value to assign
2866  *
2867  * If (@v == @old), atomically updates @v to @new with acquire ordering.
2868  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2869  * and relaxed ordering is provided.
2870  *
2871  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_acquire() there.
2872  *
2873  * Return: @true if the exchange occurred, @false otherwise.
2874  */
2875 static __always_inline bool
2876 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2877 {
2878 	instrument_atomic_read_write(v, sizeof(*v));
2879 	instrument_atomic_read_write(old, sizeof(*old));
2880 	return raw_atomic64_try_cmpxchg_acquire(v, old, new);
2881 }
2882 
2883 /**
2884  * atomic64_try_cmpxchg_release() - atomic compare and exchange with release ordering
2885  * @v: pointer to atomic64_t
2886  * @old: pointer to s64 value to compare with
2887  * @new: s64 value to assign
2888  *
2889  * If (@v == @old), atomically updates @v to @new with release ordering.
2890  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2891  * and relaxed ordering is provided.
2892  *
2893  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_release() there.
2894  *
2895  * Return: @true if the exchange occurred, @false otherwise.
2896  */
2897 static __always_inline bool
2898 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2899 {
2900 	kcsan_release();
2901 	instrument_atomic_read_write(v, sizeof(*v));
2902 	instrument_atomic_read_write(old, sizeof(*old));
2903 	return raw_atomic64_try_cmpxchg_release(v, old, new);
2904 }
2905 
2906 /**
2907  * atomic64_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
2908  * @v: pointer to atomic64_t
2909  * @old: pointer to s64 value to compare with
2910  * @new: s64 value to assign
2911  *
2912  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
2913  * Otherwise, @v is not modified, @old is updated to the current value of @v,
2914  * and relaxed ordering is provided.
2915  *
2916  * Unsafe to use in noinstr code; use raw_atomic64_try_cmpxchg_relaxed() there.
2917  *
2918  * Return: @true if the exchange occurred, @false otherwise.
2919  */
2920 static __always_inline bool
2921 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2922 {
2923 	instrument_atomic_read_write(v, sizeof(*v));
2924 	instrument_atomic_read_write(old, sizeof(*old));
2925 	return raw_atomic64_try_cmpxchg_relaxed(v, old, new);
2926 }
2927 
2928 /**
2929  * atomic64_sub_and_test() - atomic subtract and test if zero with full ordering
2930  * @i: s64 value to subtract
2931  * @v: pointer to atomic64_t
2932  *
2933  * Atomically updates @v to (@v - @i) with full ordering.
2934  *
2935  * Unsafe to use in noinstr code; use raw_atomic64_sub_and_test() there.
2936  *
2937  * Return: @true if the resulting value of @v is zero, @false otherwise.
2938  */
2939 static __always_inline bool
2940 atomic64_sub_and_test(s64 i, atomic64_t *v)
2941 {
2942 	kcsan_mb();
2943 	instrument_atomic_read_write(v, sizeof(*v));
2944 	return raw_atomic64_sub_and_test(i, v);
2945 }
2946 
2947 /**
2948  * atomic64_dec_and_test() - atomic decrement and test if zero with full ordering
2949  * @v: pointer to atomic64_t
2950  *
2951  * Atomically updates @v to (@v - 1) with full ordering.
2952  *
2953  * Unsafe to use in noinstr code; use raw_atomic64_dec_and_test() there.
2954  *
2955  * Return: @true if the resulting value of @v is zero, @false otherwise.
2956  */
2957 static __always_inline bool
2958 atomic64_dec_and_test(atomic64_t *v)
2959 {
2960 	kcsan_mb();
2961 	instrument_atomic_read_write(v, sizeof(*v));
2962 	return raw_atomic64_dec_and_test(v);
2963 }
2964 
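/*
 * Illustrative usage sketch, not part of the generated API: a hypothetical
 * reference drop; atomic64_dec_and_test() is fully ordered, so the caller
 * that sees the count hit zero also sees all prior accesses to the object
 * and can safely free it.
 */
static __always_inline bool
example_put_ref(atomic64_t *refcount)
{
	/* True only for the caller that dropped the final reference. */
	return atomic64_dec_and_test(refcount);
}
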
2965 /**
2966  * atomic64_inc_and_test() - atomic increment and test if zero with full ordering
2967  * @v: pointer to atomic64_t
2968  *
2969  * Atomically updates @v to (@v + 1) with full ordering.
2970  *
2971  * Unsafe to use in noinstr code; use raw_atomic64_inc_and_test() there.
2972  *
2973  * Return: @true if the resulting value of @v is zero, @false otherwise.
2974  */
2975 static __always_inline bool
2976 atomic64_inc_and_test(atomic64_t *v)
2977 {
2978 	kcsan_mb();
2979 	instrument_atomic_read_write(v, sizeof(*v));
2980 	return raw_atomic64_inc_and_test(v);
2981 }
2982 
2983 /**
2984  * atomic64_add_negative() - atomic add and test if negative with full ordering
2985  * @i: s64 value to add
2986  * @v: pointer to atomic64_t
2987  *
2988  * Atomically updates @v to (@v + @i) with full ordering.
2989  *
2990  * Unsafe to use in noinstr code; use raw_atomic64_add_negative() there.
2991  *
2992  * Return: @true if the resulting value of @v is negative, @false otherwise.
2993  */
2994 static __always_inline bool
2995 atomic64_add_negative(s64 i, atomic64_t *v)
2996 {
2997 	kcsan_mb();
2998 	instrument_atomic_read_write(v, sizeof(*v));
2999 	return raw_atomic64_add_negative(i, v);
3000 }
3001 
3002 /**
3003  * atomic64_add_negative_acquire() - atomic add and test if negative with acquire ordering
3004  * @i: s64 value to add
3005  * @v: pointer to atomic64_t
3006  *
3007  * Atomically updates @v to (@v + @i) with acquire ordering.
3008  *
3009  * Unsafe to use in noinstr code; use raw_atomic64_add_negative_acquire() there.
3010  *
3011  * Return: @true if the resulting value of @v is negative, @false otherwise.
3012  */
3013 static __always_inline bool
3014 atomic64_add_negative_acquire(s64 i, atomic64_t *v)
3015 {
3016 	instrument_atomic_read_write(v, sizeof(*v));
3017 	return raw_atomic64_add_negative_acquire(i, v);
3018 }
3019 
3020 /**
3021  * atomic64_add_negative_release() - atomic add and test if negative with release ordering
3022  * @i: s64 value to add
3023  * @v: pointer to atomic64_t
3024  *
3025  * Atomically updates @v to (@v + @i) with release ordering.
3026  *
3027  * Unsafe to use in noinstr code; use raw_atomic64_add_negative_release() there.
3028  *
3029  * Return: @true if the resulting value of @v is negative, @false otherwise.
3030  */
3031 static __always_inline bool
3032 atomic64_add_negative_release(s64 i, atomic64_t *v)
3033 {
3034 	kcsan_release();
3035 	instrument_atomic_read_write(v, sizeof(*v));
3036 	return raw_atomic64_add_negative_release(i, v);
3037 }
3038 
3039 /**
3040  * atomic64_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
3041  * @i: s64 value to add
3042  * @v: pointer to atomic64_t
3043  *
3044  * Atomically updates @v to (@v + @i) with relaxed ordering.
3045  *
3046  * Unsafe to use in noinstr code; use raw_atomic64_add_negative_relaxed() there.
3047  *
3048  * Return: @true if the resulting value of @v is negative, @false otherwise.
3049  */
3050 static __always_inline bool
3051 atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
3052 {
3053 	instrument_atomic_read_write(v, sizeof(*v));
3054 	return raw_atomic64_add_negative_relaxed(i, v);
3055 }
3056 
3057 /**
3058  * atomic64_fetch_add_unless() - atomic add unless value with full ordering
3059  * @v: pointer to atomic64_t
3060  * @a: s64 value to add
3061  * @u: s64 value to compare with
3062  *
3063  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
3064  * Otherwise, @v is not modified and relaxed ordering is provided.
3065  *
3066  * Unsafe to use in noinstr code; use raw_atomic64_fetch_add_unless() there.
3067  *
3068  * Return: The original value of @v.
3069  */
3070 static __always_inline s64
3071 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
3072 {
3073 	kcsan_mb();
3074 	instrument_atomic_read_write(v, sizeof(*v));
3075 	return raw_atomic64_fetch_add_unless(v, a, u);
3076 }
3077 
3078 /**
3079  * atomic64_add_unless() - atomic add unless value with full ordering
3080  * @v: pointer to atomic64_t
3081  * @a: s64 value to add
3082  * @u: s64 value to compare with
3083  *
3084  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
3085  * Otherwise, @v is not modified and relaxed ordering is provided.
3086  *
3087  * Unsafe to use in noinstr code; use raw_atomic64_add_unless() there.
3088  *
3089  * Return: @true if @v was updated, @false otherwise.
3090  */
3091 static __always_inline bool
3092 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
3093 {
3094 	kcsan_mb();
3095 	instrument_atomic_read_write(v, sizeof(*v));
3096 	return raw_atomic64_add_unless(v, a, u);
3097 }
3098 
3099 /**
3100  * atomic64_inc_not_zero() - atomic increment unless zero with full ordering
3101  * @v: pointer to atomic64_t
3102  *
3103  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
3104  * Otherwise, @v is not modified and relaxed ordering is provided.
3105  *
3106  * Unsafe to use in noinstr code; use raw_atomic64_inc_not_zero() there.
3107  *
3108  * Return: @true if @v was updated, @false otherwise.
3109  */
3110 static __always_inline bool
3111 atomic64_inc_not_zero(atomic64_t *v)
3112 {
3113 	kcsan_mb();
3114 	instrument_atomic_read_write(v, sizeof(*v));
3115 	return raw_atomic64_inc_not_zero(v);
3116 }
3117 
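/*
 * Illustrative usage sketch, not part of the generated API: a hypothetical
 * "try to get a reference" helper; atomic64_inc_not_zero() refuses to
 * resurrect an object whose count has already dropped to zero.
 */
static __always_inline bool
example_tryget_ref(atomic64_t *refcount)
{
	return atomic64_inc_not_zero(refcount);
}
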
3118 /**
3119  * atomic64_inc_unless_negative() - atomic increment unless negative with full ordering
3120  * @v: pointer to atomic64_t
3121  *
3122  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
3123  * Otherwise, @v is not modified and relaxed ordering is provided.
3124  *
3125  * Unsafe to use in noinstr code; use raw_atomic64_inc_unless_negative() there.
3126  *
3127  * Return: @true if @v was updated, @false otherwise.
3128  */
3129 static __always_inline bool
3130 atomic64_inc_unless_negative(atomic64_t *v)
3131 {
3132 	kcsan_mb();
3133 	instrument_atomic_read_write(v, sizeof(*v));
3134 	return raw_atomic64_inc_unless_negative(v);
3135 }
3136 
3137 /**
3138  * atomic64_dec_unless_positive() - atomic decrement unless positive with full ordering
3139  * @v: pointer to atomic64_t
3140  *
3141  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
3142  * Otherwise, @v is not modified and relaxed ordering is provided.
3143  *
3144  * Unsafe to use in noinstr code; use raw_atomic64_dec_unless_positive() there.
3145  *
3146  * Return: @true if @v was updated, @false otherwise.
3147  */
3148 static __always_inline bool
3149 atomic64_dec_unless_positive(atomic64_t *v)
3150 {
3151 	kcsan_mb();
3152 	instrument_atomic_read_write(v, sizeof(*v));
3153 	return raw_atomic64_dec_unless_positive(v);
3154 }
3155 
3156 /**
3157  * atomic64_dec_if_positive() - atomic decrement if positive with full ordering
3158  * @v: pointer to atomic64_t
3159  *
3160  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
3161  * Otherwise, @v is not modified and relaxed ordering is provided.
3162  *
3163  * Unsafe to use in noinstr code; use raw_atomic64_dec_if_positive() there.
3164  *
3165  * Return: The old value of @v decremented by one, regardless of whether @v was updated.
3166  */
3167 static __always_inline s64
3168 atomic64_dec_if_positive(atomic64_t *v)
3169 {
3170 	kcsan_mb();
3171 	instrument_atomic_read_write(v, sizeof(*v));
3172 	return raw_atomic64_dec_if_positive(v);
3173 }
3174 
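/*
 * Illustrative usage sketch, not part of the generated API: a hypothetical
 * non-blocking "take one token" helper; atomic64_dec_if_positive() only
 * decrements a positive count and returns the decremented value either way,
 * so a negative result means no token was taken.
 */
static __always_inline bool
example_try_take_token(atomic64_t *tokens)
{
	return atomic64_dec_if_positive(tokens) >= 0;
}
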
3175 /**
3176  * atomic_long_read() - atomic load with relaxed ordering
3177  * @v: pointer to atomic_long_t
3178  *
3179  * Atomically loads the value of @v with relaxed ordering.
3180  *
3181  * Unsafe to use in noinstr code; use raw_atomic_long_read() there.
3182  *
3183  * Return: The value loaded from @v.
3184  */
3185 static __always_inline long
3186 atomic_long_read(const atomic_long_t *v)
3187 {
3188 	instrument_atomic_read(v, sizeof(*v));
3189 	return raw_atomic_long_read(v);
3190 }
3191 
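/*
 * Illustrative usage sketch, not part of the generated API: atomic_long_t
 * is sized to the machine word, so a hypothetical snapshot helper like this
 * maps onto the 32-bit or 64-bit atomics as appropriate for the platform.
 */
static __always_inline long
example_snapshot_counter(const atomic_long_t *counter)
{
	return atomic_long_read(counter);
}
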
3192 /**
3193  * atomic_long_read_acquire() - atomic load with acquire ordering
3194  * @v: pointer to atomic_long_t
3195  *
3196  * Atomically loads the value of @v with acquire ordering.
3197  *
3198  * Unsafe to use in noinstr code; use raw_atomic_long_read_acquire() there.
3199  *
3200  * Return: The value loaded from @v.
3201  */
3202 static __always_inline long
3203 atomic_long_read_acquire(const atomic_long_t *v)
3204 {
3205 	instrument_atomic_read(v, sizeof(*v));
3206 	return raw_atomic_long_read_acquire(v);
3207 }
3208 
3209 /**
3210  * atomic_long_set() - atomic set with relaxed ordering
3211  * @v: pointer to atomic_long_t
3212  * @i: long value to assign
3213  *
3214  * Atomically sets @v to @i with relaxed ordering.
3215  *
3216  * Unsafe to use in noinstr code; use raw_atomic_long_set() there.
3217  *
3218  * Return: Nothing.
3219  */
3220 static __always_inline void
3221 atomic_long_set(atomic_long_t *v, long i)
3222 {
3223 	instrument_atomic_write(v, sizeof(*v));
3224 	raw_atomic_long_set(v, i);
3225 }
3226 
3227 /**
3228  * atomic_long_set_release() - atomic set with release ordering
3229  * @v: pointer to atomic_long_t
3230  * @i: long value to assign
3231  *
3232  * Atomically sets @v to @i with release ordering.
3233  *
3234  * Unsafe to use in noinstr code; use raw_atomic_long_set_release() there.
3235  *
3236  * Return: Nothing.
3237  */
3238 static __always_inline void
3239 atomic_long_set_release(atomic_long_t *v, long i)
3240 {
3241 	kcsan_release();
3242 	instrument_atomic_write(v, sizeof(*v));
3243 	raw_atomic_long_set_release(v, i);
3244 }
3245 
3246 /**
3247  * atomic_long_add() - atomic add with relaxed ordering
3248  * @i: long value to add
3249  * @v: pointer to atomic_long_t
3250  *
3251  * Atomically updates @v to (@v + @i) with relaxed ordering.
3252  *
3253  * Unsafe to use in noinstr code; use raw_atomic_long_add() there.
3254  *
3255  * Return: Nothing.
3256  */
3257 static __always_inline void
3258 atomic_long_add(long i, atomic_long_t *v)
3259 {
3260 	instrument_atomic_read_write(v, sizeof(*v));
3261 	raw_atomic_long_add(i, v);
3262 }
3263 
3264 /**
3265  * atomic_long_add_return() - atomic add with full ordering
3266  * @i: long value to add
3267  * @v: pointer to atomic_long_t
3268  *
3269  * Atomically updates @v to (@v + @i) with full ordering.
3270  *
3271  * Unsafe to use in noinstr code; use raw_atomic_long_add_return() there.
3272  *
3273  * Return: The updated value of @v.
3274  */
3275 static __always_inline long
3276 atomic_long_add_return(long i, atomic_long_t *v)
3277 {
3278 	kcsan_mb();
3279 	instrument_atomic_read_write(v, sizeof(*v));
3280 	return raw_atomic_long_add_return(i, v);
3281 }
3282 
3283 /**
3284  * atomic_long_add_return_acquire() - atomic add with acquire ordering
3285  * @i: long value to add
3286  * @v: pointer to atomic_long_t
3287  *
3288  * Atomically updates @v to (@v + @i) with acquire ordering.
3289  *
3290  * Unsafe to use in noinstr code; use raw_atomic_long_add_return_acquire() there.
3291  *
3292  * Return: The updated value of @v.
3293  */
3294 static __always_inline long
3295 atomic_long_add_return_acquire(long i, atomic_long_t *v)
3296 {
3297 	instrument_atomic_read_write(v, sizeof(*v));
3298 	return raw_atomic_long_add_return_acquire(i, v);
3299 }
3300 
3301 /**
3302  * atomic_long_add_return_release() - atomic add with release ordering
3303  * @i: long value to add
3304  * @v: pointer to atomic_long_t
3305  *
3306  * Atomically updates @v to (@v + @i) with release ordering.
3307  *
3308  * Unsafe to use in noinstr code; use raw_atomic_long_add_return_release() there.
3309  *
3310  * Return: The updated value of @v.
3311  */
3312 static __always_inline long
3313 atomic_long_add_return_release(long i, atomic_long_t *v)
3314 {
3315 	kcsan_release();
3316 	instrument_atomic_read_write(v, sizeof(*v));
3317 	return raw_atomic_long_add_return_release(i, v);
3318 }
3319 
3320 /**
3321  * atomic_long_add_return_relaxed() - atomic add with relaxed ordering
3322  * @i: long value to add
3323  * @v: pointer to atomic_long_t
3324  *
3325  * Atomically updates @v to (@v + @i) with relaxed ordering.
3326  *
3327  * Unsafe to use in noinstr code; use raw_atomic_long_add_return_relaxed() there.
3328  *
3329  * Return: The updated value of @v.
3330  */
3331 static __always_inline long
3332 atomic_long_add_return_relaxed(long i, atomic_long_t *v)
3333 {
3334 	instrument_atomic_read_write(v, sizeof(*v));
3335 	return raw_atomic_long_add_return_relaxed(i, v);
3336 }
3337 
3338 /**
3339  * atomic_long_fetch_add() - atomic add with full ordering
3340  * @i: long value to add
3341  * @v: pointer to atomic_long_t
3342  *
3343  * Atomically updates @v to (@v + @i) with full ordering.
3344  *
3345  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add() there.
3346  *
3347  * Return: The original value of @v.
3348  */
3349 static __always_inline long
3350 atomic_long_fetch_add(long i, atomic_long_t *v)
3351 {
3352 	kcsan_mb();
3353 	instrument_atomic_read_write(v, sizeof(*v));
3354 	return raw_atomic_long_fetch_add(i, v);
3355 }
3356 
3357 /**
3358  * atomic_long_fetch_add_acquire() - atomic add with acquire ordering
3359  * @i: long value to add
3360  * @v: pointer to atomic_long_t
3361  *
3362  * Atomically updates @v to (@v + @i) with acquire ordering.
3363  *
3364  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_acquire() there.
3365  *
3366  * Return: The original value of @v.
3367  */
3368 static __always_inline long
3369 atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
3370 {
3371 	instrument_atomic_read_write(v, sizeof(*v));
3372 	return raw_atomic_long_fetch_add_acquire(i, v);
3373 }
3374 
3375 /**
3376  * atomic_long_fetch_add_release() - atomic add with release ordering
3377  * @i: long value to add
3378  * @v: pointer to atomic_long_t
3379  *
3380  * Atomically updates @v to (@v + @i) with release ordering.
3381  *
3382  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_release() there.
3383  *
3384  * Return: The original value of @v.
3385  */
3386 static __always_inline long
3387 atomic_long_fetch_add_release(long i, atomic_long_t *v)
3388 {
3389 	kcsan_release();
3390 	instrument_atomic_read_write(v, sizeof(*v));
3391 	return raw_atomic_long_fetch_add_release(i, v);
3392 }
3393 
3394 /**
3395  * atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
3396  * @i: long value to add
3397  * @v: pointer to atomic_long_t
3398  *
3399  * Atomically updates @v to (@v + @i) with relaxed ordering.
3400  *
3401  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_relaxed() there.
3402  *
3403  * Return: The original value of @v.
3404  */
3405 static __always_inline long
3406 atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
3407 {
3408 	instrument_atomic_read_write(v, sizeof(*v));
3409 	return raw_atomic_long_fetch_add_relaxed(i, v);
3410 }
3411 
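/*
 * Editorial example, not part of the generated interface: unlike
 * atomic_long_add_return(), atomic_long_fetch_add() returns the value seen
 * *before* the addition, which suits handing out consecutive index ranges.
 * The helper below is a hypothetical sketch.
 */
static __always_inline long
example_reserve_range(atomic_long_t *next_free, long count)
{
	/* First index of the reserved [start, start + count) window. */
	return atomic_long_fetch_add(count, next_free);
}
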
3412 /**
3413  * atomic_long_sub() - atomic subtract with relaxed ordering
3414  * @i: long value to subtract
3415  * @v: pointer to atomic_long_t
3416  *
3417  * Atomically updates @v to (@v - @i) with relaxed ordering.
3418  *
3419  * Unsafe to use in noinstr code; use raw_atomic_long_sub() there.
3420  *
3421  * Return: Nothing.
3422  */
3423 static __always_inline void
3424 atomic_long_sub(long i, atomic_long_t *v)
3425 {
3426 	instrument_atomic_read_write(v, sizeof(*v));
3427 	raw_atomic_long_sub(i, v);
3428 }
3429 
3430 /**
3431  * atomic_long_sub_return() - atomic subtract with full ordering
3432  * @i: long value to subtract
3433  * @v: pointer to atomic_long_t
3434  *
3435  * Atomically updates @v to (@v - @i) with full ordering.
3436  *
3437  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return() there.
3438  *
3439  * Return: The updated value of @v.
3440  */
3441 static __always_inline long
3442 atomic_long_sub_return(long i, atomic_long_t *v)
3443 {
3444 	kcsan_mb();
3445 	instrument_atomic_read_write(v, sizeof(*v));
3446 	return raw_atomic_long_sub_return(i, v);
3447 }
3448 
3449 /**
3450  * atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
3451  * @i: long value to subtract
3452  * @v: pointer to atomic_long_t
3453  *
3454  * Atomically updates @v to (@v - @i) with acquire ordering.
3455  *
3456  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_acquire() there.
3457  *
3458  * Return: The updated value of @v.
3459  */
3460 static __always_inline long
3461 atomic_long_sub_return_acquire(long i, atomic_long_t *v)
3462 {
3463 	instrument_atomic_read_write(v, sizeof(*v));
3464 	return raw_atomic_long_sub_return_acquire(i, v);
3465 }
3466 
3467 /**
3468  * atomic_long_sub_return_release() - atomic subtract with release ordering
3469  * @i: long value to subtract
3470  * @v: pointer to atomic_long_t
3471  *
3472  * Atomically updates @v to (@v - @i) with release ordering.
3473  *
3474  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_release() there.
3475  *
3476  * Return: The updated value of @v.
3477  */
3478 static __always_inline long
3479 atomic_long_sub_return_release(long i, atomic_long_t *v)
3480 {
3481 	kcsan_release();
3482 	instrument_atomic_read_write(v, sizeof(*v));
3483 	return raw_atomic_long_sub_return_release(i, v);
3484 }
3485 
3486 /**
3487  * atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
3488  * @i: long value to subtract
3489  * @v: pointer to atomic_long_t
3490  *
3491  * Atomically updates @v to (@v - @i) with relaxed ordering.
3492  *
3493  * Unsafe to use in noinstr code; use raw_atomic_long_sub_return_relaxed() there.
3494  *
3495  * Return: The updated value of @v.
3496  */
3497 static __always_inline long
3498 atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
3499 {
3500 	instrument_atomic_read_write(v, sizeof(*v));
3501 	return raw_atomic_long_sub_return_relaxed(i, v);
3502 }
3503 
3504 /**
3505  * atomic_long_fetch_sub() - atomic subtract with full ordering
3506  * @i: long value to subtract
3507  * @v: pointer to atomic_long_t
3508  *
3509  * Atomically updates @v to (@v - @i) with full ordering.
3510  *
3511  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub() there.
3512  *
3513  * Return: The original value of @v.
3514  */
3515 static __always_inline long
3516 atomic_long_fetch_sub(long i, atomic_long_t *v)
3517 {
3518 	kcsan_mb();
3519 	instrument_atomic_read_write(v, sizeof(*v));
3520 	return raw_atomic_long_fetch_sub(i, v);
3521 }
3522 
3523 /**
3524  * atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
3525  * @i: long value to subtract
3526  * @v: pointer to atomic_long_t
3527  *
3528  * Atomically updates @v to (@v - @i) with acquire ordering.
3529  *
3530  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_acquire() there.
3531  *
3532  * Return: The original value of @v.
3533  */
3534 static __always_inline long
3535 atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
3536 {
3537 	instrument_atomic_read_write(v, sizeof(*v));
3538 	return raw_atomic_long_fetch_sub_acquire(i, v);
3539 }
3540 
3541 /**
3542  * atomic_long_fetch_sub_release() - atomic subtract with release ordering
3543  * @i: long value to subtract
3544  * @v: pointer to atomic_long_t
3545  *
3546  * Atomically updates @v to (@v - @i) with release ordering.
3547  *
3548  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_release() there.
3549  *
3550  * Return: The original value of @v.
3551  */
3552 static __always_inline long
3553 atomic_long_fetch_sub_release(long i, atomic_long_t *v)
3554 {
3555 	kcsan_release();
3556 	instrument_atomic_read_write(v, sizeof(*v));
3557 	return raw_atomic_long_fetch_sub_release(i, v);
3558 }
3559 
3560 /**
3561  * atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
3562  * @i: long value to subtract
3563  * @v: pointer to atomic_long_t
3564  *
3565  * Atomically updates @v to (@v - @i) with relaxed ordering.
3566  *
3567  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_sub_relaxed() there.
3568  *
3569  * Return: The original value of @v.
3570  */
3571 static __always_inline long
3572 atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
3573 {
3574 	instrument_atomic_read_write(v, sizeof(*v));
3575 	return raw_atomic_long_fetch_sub_relaxed(i, v);
3576 }
3577 
3578 /**
3579  * atomic_long_inc() - atomic increment with relaxed ordering
3580  * @v: pointer to atomic_long_t
3581  *
3582  * Atomically updates @v to (@v + 1) with relaxed ordering.
3583  *
3584  * Unsafe to use in noinstr code; use raw_atomic_long_inc() there.
3585  *
3586  * Return: Nothing.
3587  */
3588 static __always_inline void
3589 atomic_long_inc(atomic_long_t *v)
3590 {
3591 	instrument_atomic_read_write(v, sizeof(*v));
3592 	raw_atomic_long_inc(v);
3593 }
3594 
3595 /**
3596  * atomic_long_inc_return() - atomic increment with full ordering
3597  * @v: pointer to atomic_long_t
3598  *
3599  * Atomically updates @v to (@v + 1) with full ordering.
3600  *
3601  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return() there.
3602  *
3603  * Return: The updated value of @v.
3604  */
3605 static __always_inline long
3606 atomic_long_inc_return(atomic_long_t *v)
3607 {
3608 	kcsan_mb();
3609 	instrument_atomic_read_write(v, sizeof(*v));
3610 	return raw_atomic_long_inc_return(v);
3611 }
3612 
3613 /**
3614  * atomic_long_inc_return_acquire() - atomic increment with acquire ordering
3615  * @v: pointer to atomic_long_t
3616  *
3617  * Atomically updates @v to (@v + 1) with acquire ordering.
3618  *
3619  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_acquire() there.
3620  *
3621  * Return: The updated value of @v.
3622  */
3623 static __always_inline long
3624 atomic_long_inc_return_acquire(atomic_long_t *v)
3625 {
3626 	instrument_atomic_read_write(v, sizeof(*v));
3627 	return raw_atomic_long_inc_return_acquire(v);
3628 }
3629 
3630 /**
3631  * atomic_long_inc_return_release() - atomic increment with release ordering
3632  * @v: pointer to atomic_long_t
3633  *
3634  * Atomically updates @v to (@v + 1) with release ordering.
3635  *
3636  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_release() there.
3637  *
3638  * Return: The updated value of @v.
3639  */
3640 static __always_inline long
3641 atomic_long_inc_return_release(atomic_long_t *v)
3642 {
3643 	kcsan_release();
3644 	instrument_atomic_read_write(v, sizeof(*v));
3645 	return raw_atomic_long_inc_return_release(v);
3646 }
3647 
3648 /**
3649  * atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
3650  * @v: pointer to atomic_long_t
3651  *
3652  * Atomically updates @v to (@v + 1) with relaxed ordering.
3653  *
3654  * Unsafe to use in noinstr code; use raw_atomic_long_inc_return_relaxed() there.
3655  *
3656  * Return: The updated value of @v.
3657  */
3658 static __always_inline long
3659 atomic_long_inc_return_relaxed(atomic_long_t *v)
3660 {
3661 	instrument_atomic_read_write(v, sizeof(*v));
3662 	return raw_atomic_long_inc_return_relaxed(v);
3663 }
3664 
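/*
 * Editorial example, not part of the generated interface: because
 * atomic_long_inc_return() hands back the incremented value, it can issue
 * unique, monotonically increasing sequence numbers without extra locking.
 * The helper name is made up for illustration.
 */
static __always_inline long
example_next_sequence(atomic_long_t *seq)
{
	/* Two racing callers can never observe the same return value. */
	return atomic_long_inc_return(seq);
}
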
3665 /**
3666  * atomic_long_fetch_inc() - atomic increment with full ordering
3667  * @v: pointer to atomic_long_t
3668  *
3669  * Atomically updates @v to (@v + 1) with full ordering.
3670  *
3671  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc() there.
3672  *
3673  * Return: The original value of @v.
3674  */
3675 static __always_inline long
3676 atomic_long_fetch_inc(atomic_long_t *v)
3677 {
3678 	kcsan_mb();
3679 	instrument_atomic_read_write(v, sizeof(*v));
3680 	return raw_atomic_long_fetch_inc(v);
3681 }
3682 
3683 /**
3684  * atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
3685  * @v: pointer to atomic_long_t
3686  *
3687  * Atomically updates @v to (@v + 1) with acquire ordering.
3688  *
3689  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_acquire() there.
3690  *
3691  * Return: The original value of @v.
3692  */
3693 static __always_inline long
3694 atomic_long_fetch_inc_acquire(atomic_long_t *v)
3695 {
3696 	instrument_atomic_read_write(v, sizeof(*v));
3697 	return raw_atomic_long_fetch_inc_acquire(v);
3698 }
3699 
3700 /**
3701  * atomic_long_fetch_inc_release() - atomic increment with release ordering
3702  * @v: pointer to atomic_long_t
3703  *
3704  * Atomically updates @v to (@v + 1) with release ordering.
3705  *
3706  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_release() there.
3707  *
3708  * Return: The original value of @v.
3709  */
3710 static __always_inline long
3711 atomic_long_fetch_inc_release(atomic_long_t *v)
3712 {
3713 	kcsan_release();
3714 	instrument_atomic_read_write(v, sizeof(*v));
3715 	return raw_atomic_long_fetch_inc_release(v);
3716 }
3717 
3718 /**
3719  * atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
3720  * @v: pointer to atomic_long_t
3721  *
3722  * Atomically updates @v to (@v + 1) with relaxed ordering.
3723  *
3724  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_inc_relaxed() there.
3725  *
3726  * Return: The original value of @v.
3727  */
3728 static __always_inline long
3729 atomic_long_fetch_inc_relaxed(atomic_long_t *v)
3730 {
3731 	instrument_atomic_read_write(v, sizeof(*v));
3732 	return raw_atomic_long_fetch_inc_relaxed(v);
3733 }
3734 
3735 /**
3736  * atomic_long_dec() - atomic decrement with relaxed ordering
3737  * @v: pointer to atomic_long_t
3738  *
3739  * Atomically updates @v to (@v - 1) with relaxed ordering.
3740  *
3741  * Unsafe to use in noinstr code; use raw_atomic_long_dec() there.
3742  *
3743  * Return: Nothing.
3744  */
3745 static __always_inline void
3746 atomic_long_dec(atomic_long_t *v)
3747 {
3748 	instrument_atomic_read_write(v, sizeof(*v));
3749 	raw_atomic_long_dec(v);
3750 }
3751 
3752 /**
3753  * atomic_long_dec_return() - atomic decrement with full ordering
3754  * @v: pointer to atomic_long_t
3755  *
3756  * Atomically updates @v to (@v - 1) with full ordering.
3757  *
3758  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return() there.
3759  *
3760  * Return: The updated value of @v.
3761  */
3762 static __always_inline long
3763 atomic_long_dec_return(atomic_long_t *v)
3764 {
3765 	kcsan_mb();
3766 	instrument_atomic_read_write(v, sizeof(*v));
3767 	return raw_atomic_long_dec_return(v);
3768 }
3769 
3770 /**
3771  * atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
3772  * @v: pointer to atomic_long_t
3773  *
3774  * Atomically updates @v to (@v - 1) with acquire ordering.
3775  *
3776  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_acquire() there.
3777  *
3778  * Return: The updated value of @v.
3779  */
3780 static __always_inline long
3781 atomic_long_dec_return_acquire(atomic_long_t *v)
3782 {
3783 	instrument_atomic_read_write(v, sizeof(*v));
3784 	return raw_atomic_long_dec_return_acquire(v);
3785 }
3786 
3787 /**
3788  * atomic_long_dec_return_release() - atomic decrement with release ordering
3789  * @v: pointer to atomic_long_t
3790  *
3791  * Atomically updates @v to (@v - 1) with release ordering.
3792  *
3793  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_release() there.
3794  *
3795  * Return: The updated value of @v.
3796  */
3797 static __always_inline long
3798 atomic_long_dec_return_release(atomic_long_t *v)
3799 {
3800 	kcsan_release();
3801 	instrument_atomic_read_write(v, sizeof(*v));
3802 	return raw_atomic_long_dec_return_release(v);
3803 }
3804 
3805 /**
3806  * atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
3807  * @v: pointer to atomic_long_t
3808  *
3809  * Atomically updates @v to (@v - 1) with relaxed ordering.
3810  *
3811  * Unsafe to use in noinstr code; use raw_atomic_long_dec_return_relaxed() there.
3812  *
3813  * Return: The updated value of @v.
3814  */
3815 static __always_inline long
3816 atomic_long_dec_return_relaxed(atomic_long_t *v)
3817 {
3818 	instrument_atomic_read_write(v, sizeof(*v));
3819 	return raw_atomic_long_dec_return_relaxed(v);
3820 }
3821 
3822 /**
3823  * atomic_long_fetch_dec() - atomic decrement with full ordering
3824  * @v: pointer to atomic_long_t
3825  *
3826  * Atomically updates @v to (@v - 1) with full ordering.
3827  *
3828  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec() there.
3829  *
3830  * Return: The original value of @v.
3831  */
3832 static __always_inline long
3833 atomic_long_fetch_dec(atomic_long_t *v)
3834 {
3835 	kcsan_mb();
3836 	instrument_atomic_read_write(v, sizeof(*v));
3837 	return raw_atomic_long_fetch_dec(v);
3838 }
3839 
3840 /**
3841  * atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
3842  * @v: pointer to atomic_long_t
3843  *
3844  * Atomically updates @v to (@v - 1) with acquire ordering.
3845  *
3846  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_acquire() there.
3847  *
3848  * Return: The original value of @v.
3849  */
3850 static __always_inline long
3851 atomic_long_fetch_dec_acquire(atomic_long_t *v)
3852 {
3853 	instrument_atomic_read_write(v, sizeof(*v));
3854 	return raw_atomic_long_fetch_dec_acquire(v);
3855 }
3856 
3857 /**
3858  * atomic_long_fetch_dec_release() - atomic decrement with release ordering
3859  * @v: pointer to atomic_long_t
3860  *
3861  * Atomically updates @v to (@v - 1) with release ordering.
3862  *
3863  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_release() there.
3864  *
3865  * Return: The original value of @v.
3866  */
3867 static __always_inline long
3868 atomic_long_fetch_dec_release(atomic_long_t *v)
3869 {
3870 	kcsan_release();
3871 	instrument_atomic_read_write(v, sizeof(*v));
3872 	return raw_atomic_long_fetch_dec_release(v);
3873 }
3874 
3875 /**
3876  * atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
3877  * @v: pointer to atomic_long_t
3878  *
3879  * Atomically updates @v to (@v - 1) with relaxed ordering.
3880  *
3881  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_dec_relaxed() there.
3882  *
3883  * Return: The original value of @v.
3884  */
3885 static __always_inline long
3886 atomic_long_fetch_dec_relaxed(atomic_long_t *v)
3887 {
3888 	instrument_atomic_read_write(v, sizeof(*v));
3889 	return raw_atomic_long_fetch_dec_relaxed(v);
3890 }
3891 
3892 /**
3893  * atomic_long_and() - atomic bitwise AND with relaxed ordering
3894  * @i: long value
3895  * @v: pointer to atomic_long_t
3896  *
3897  * Atomically updates @v to (@v & @i) with relaxed ordering.
3898  *
3899  * Unsafe to use in noinstr code; use raw_atomic_long_and() there.
3900  *
3901  * Return: Nothing.
3902  */
3903 static __always_inline void
3904 atomic_long_and(long i, atomic_long_t *v)
3905 {
3906 	instrument_atomic_read_write(v, sizeof(*v));
3907 	raw_atomic_long_and(i, v);
3908 }
3909 
3910 /**
3911  * atomic_long_fetch_and() - atomic bitwise AND with full ordering
3912  * @i: long value
3913  * @v: pointer to atomic_long_t
3914  *
3915  * Atomically updates @v to (@v & @i) with full ordering.
3916  *
3917  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and() there.
3918  *
3919  * Return: The original value of @v.
3920  */
3921 static __always_inline long
3922 atomic_long_fetch_and(long i, atomic_long_t *v)
3923 {
3924 	kcsan_mb();
3925 	instrument_atomic_read_write(v, sizeof(*v));
3926 	return raw_atomic_long_fetch_and(i, v);
3927 }
3928 
3929 /**
3930  * atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
3931  * @i: long value
3932  * @v: pointer to atomic_long_t
3933  *
3934  * Atomically updates @v to (@v & @i) with acquire ordering.
3935  *
3936  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_acquire() there.
3937  *
3938  * Return: The original value of @v.
3939  */
3940 static __always_inline long
3941 atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
3942 {
3943 	instrument_atomic_read_write(v, sizeof(*v));
3944 	return raw_atomic_long_fetch_and_acquire(i, v);
3945 }
3946 
3947 /**
3948  * atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
3949  * @i: long value
3950  * @v: pointer to atomic_long_t
3951  *
3952  * Atomically updates @v to (@v & @i) with release ordering.
3953  *
3954  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_release() there.
3955  *
3956  * Return: The original value of @v.
3957  */
3958 static __always_inline long
3959 atomic_long_fetch_and_release(long i, atomic_long_t *v)
3960 {
3961 	kcsan_release();
3962 	instrument_atomic_read_write(v, sizeof(*v));
3963 	return raw_atomic_long_fetch_and_release(i, v);
3964 }
3965 
3966 /**
3967  * atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
3968  * @i: long value
3969  * @v: pointer to atomic_long_t
3970  *
3971  * Atomically updates @v to (@v & @i) with relaxed ordering.
3972  *
3973  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_and_relaxed() there.
3974  *
3975  * Return: The original value of @v.
3976  */
3977 static __always_inline long
3978 atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
3979 {
3980 	instrument_atomic_read_write(v, sizeof(*v));
3981 	return raw_atomic_long_fetch_and_relaxed(i, v);
3982 }
3983 
3984 /**
3985  * atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
3986  * @i: long value
3987  * @v: pointer to atomic_long_t
3988  *
3989  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
3990  *
3991  * Unsafe to use in noinstr code; use raw_atomic_long_andnot() there.
3992  *
3993  * Return: Nothing.
3994  */
3995 static __always_inline void
3996 atomic_long_andnot(long i, atomic_long_t *v)
3997 {
3998 	instrument_atomic_read_write(v, sizeof(*v));
3999 	raw_atomic_long_andnot(i, v);
4000 }
4001 
4002 /**
4003  * atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
4004  * @i: long value
4005  * @v: pointer to atomic_long_t
4006  *
4007  * Atomically updates @v to (@v & ~@i) with full ordering.
4008  *
4009  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot() there.
4010  *
4011  * Return: The original value of @v.
4012  */
4013 static __always_inline long
4014 atomic_long_fetch_andnot(long i, atomic_long_t *v)
4015 {
4016 	kcsan_mb();
4017 	instrument_atomic_read_write(v, sizeof(*v));
4018 	return raw_atomic_long_fetch_andnot(i, v);
4019 }
4020 
4021 /**
4022  * atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
4023  * @i: long value
4024  * @v: pointer to atomic_long_t
4025  *
4026  * Atomically updates @v to (@v & ~@i) with acquire ordering.
4027  *
4028  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_acquire() there.
4029  *
4030  * Return: The original value of @v.
4031  */
4032 static __always_inline long
4033 atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
4034 {
4035 	instrument_atomic_read_write(v, sizeof(*v));
4036 	return raw_atomic_long_fetch_andnot_acquire(i, v);
4037 }
4038 
4039 /**
4040  * atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
4041  * @i: long value
4042  * @v: pointer to atomic_long_t
4043  *
4044  * Atomically updates @v to (@v & ~@i) with release ordering.
4045  *
4046  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_release() there.
4047  *
4048  * Return: The original value of @v.
4049  */
4050 static __always_inline long
4051 atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
4052 {
4053 	kcsan_release();
4054 	instrument_atomic_read_write(v, sizeof(*v));
4055 	return raw_atomic_long_fetch_andnot_release(i, v);
4056 }
4057 
4058 /**
4059  * atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
4060  * @i: long value
4061  * @v: pointer to atomic_long_t
4062  *
4063  * Atomically updates @v to (@v & ~@i) with relaxed ordering.
4064  *
4065  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_andnot_relaxed() there.
4066  *
4067  * Return: The original value of @v.
4068  */
4069 static __always_inline long
4070 atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
4071 {
4072 	instrument_atomic_read_write(v, sizeof(*v));
4073 	return raw_atomic_long_fetch_andnot_relaxed(i, v);
4074 }
4075 
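/*
 * Editorial example, not part of the generated interface:
 * atomic_long_fetch_andnot() clears the bits in @i and reports the previous
 * value, so a caller can clear a flag and learn whether it had been set.
 * The flag encoding below is hypothetical.
 */
static __always_inline bool
example_clear_flag(atomic_long_t *flags, long flag)
{
	/* Non-zero iff @flag was set before this call cleared it. */
	return (atomic_long_fetch_andnot(flag, flags) & flag) != 0;
}
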
4076 /**
4077  * atomic_long_or() - atomic bitwise OR with relaxed ordering
4078  * @i: long value
4079  * @v: pointer to atomic_long_t
4080  *
4081  * Atomically updates @v to (@v | @i) with relaxed ordering.
4082  *
4083  * Unsafe to use in noinstr code; use raw_atomic_long_or() there.
4084  *
4085  * Return: Nothing.
4086  */
4087 static __always_inline void
4088 atomic_long_or(long i, atomic_long_t *v)
4089 {
4090 	instrument_atomic_read_write(v, sizeof(*v));
4091 	raw_atomic_long_or(i, v);
4092 }
4093 
4094 /**
4095  * atomic_long_fetch_or() - atomic bitwise OR with full ordering
4096  * @i: long value
4097  * @v: pointer to atomic_long_t
4098  *
4099  * Atomically updates @v to (@v | @i) with full ordering.
4100  *
4101  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or() there.
4102  *
4103  * Return: The original value of @v.
4104  */
4105 static __always_inline long
4106 atomic_long_fetch_or(long i, atomic_long_t *v)
4107 {
4108 	kcsan_mb();
4109 	instrument_atomic_read_write(v, sizeof(*v));
4110 	return raw_atomic_long_fetch_or(i, v);
4111 }
4112 
4113 /**
4114  * atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
4115  * @i: long value
4116  * @v: pointer to atomic_long_t
4117  *
4118  * Atomically updates @v to (@v | @i) with acquire ordering.
4119  *
4120  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_acquire() there.
4121  *
4122  * Return: The original value of @v.
4123  */
4124 static __always_inline long
4125 atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
4126 {
4127 	instrument_atomic_read_write(v, sizeof(*v));
4128 	return raw_atomic_long_fetch_or_acquire(i, v);
4129 }
4130 
4131 /**
4132  * atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
4133  * @i: long value
4134  * @v: pointer to atomic_long_t
4135  *
4136  * Atomically updates @v to (@v | @i) with release ordering.
4137  *
4138  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_release() there.
4139  *
4140  * Return: The original value of @v.
4141  */
4142 static __always_inline long
4143 atomic_long_fetch_or_release(long i, atomic_long_t *v)
4144 {
4145 	kcsan_release();
4146 	instrument_atomic_read_write(v, sizeof(*v));
4147 	return raw_atomic_long_fetch_or_release(i, v);
4148 }
4149 
4150 /**
4151  * atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
4152  * @i: long value
4153  * @v: pointer to atomic_long_t
4154  *
4155  * Atomically updates @v to (@v | @i) with relaxed ordering.
4156  *
4157  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_or_relaxed() there.
4158  *
4159  * Return: The original value of @v.
4160  */
4161 static __always_inline long
4162 atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
4163 {
4164 	instrument_atomic_read_write(v, sizeof(*v));
4165 	return raw_atomic_long_fetch_or_relaxed(i, v);
4166 }
4167 
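/*
 * Editorial example, not part of the generated interface: since
 * atomic_long_fetch_or() returns the pre-OR value, it doubles as a simple
 * "set a flag and report whether it was already set" primitive. The helper
 * name is hypothetical.
 */
static __always_inline bool
example_test_and_set_flag(atomic_long_t *flags, long flag)
{
	return (atomic_long_fetch_or(flag, flags) & flag) != 0;
}
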
4168 /**
4169  * atomic_long_xor() - atomic bitwise XOR with relaxed ordering
4170  * @i: long value
4171  * @v: pointer to atomic_long_t
4172  *
4173  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
4174  *
4175  * Unsafe to use in noinstr code; use raw_atomic_long_xor() there.
4176  *
4177  * Return: Nothing.
4178  */
4179 static __always_inline void
4180 atomic_long_xor(long i, atomic_long_t *v)
4181 {
4182 	instrument_atomic_read_write(v, sizeof(*v));
4183 	raw_atomic_long_xor(i, v);
4184 }
4185 
4186 /**
4187  * atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
4188  * @i: long value
4189  * @v: pointer to atomic_long_t
4190  *
4191  * Atomically updates @v to (@v ^ @i) with full ordering.
4192  *
4193  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor() there.
4194  *
4195  * Return: The original value of @v.
4196  */
4197 static __always_inline long
4198 atomic_long_fetch_xor(long i, atomic_long_t *v)
4199 {
4200 	kcsan_mb();
4201 	instrument_atomic_read_write(v, sizeof(*v));
4202 	return raw_atomic_long_fetch_xor(i, v);
4203 }
4204 
4205 /**
4206  * atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
4207  * @i: long value
4208  * @v: pointer to atomic_long_t
4209  *
4210  * Atomically updates @v to (@v ^ @i) with acquire ordering.
4211  *
4212  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_acquire() there.
4213  *
4214  * Return: The original value of @v.
4215  */
4216 static __always_inline long
4217 atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
4218 {
4219 	instrument_atomic_read_write(v, sizeof(*v));
4220 	return raw_atomic_long_fetch_xor_acquire(i, v);
4221 }
4222 
4223 /**
4224  * atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
4225  * @i: long value
4226  * @v: pointer to atomic_long_t
4227  *
4228  * Atomically updates @v to (@v ^ @i) with release ordering.
4229  *
4230  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_release() there.
4231  *
4232  * Return: The original value of @v.
4233  */
4234 static __always_inline long
4235 atomic_long_fetch_xor_release(long i, atomic_long_t *v)
4236 {
4237 	kcsan_release();
4238 	instrument_atomic_read_write(v, sizeof(*v));
4239 	return raw_atomic_long_fetch_xor_release(i, v);
4240 }
4241 
4242 /**
4243  * atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
4244  * @i: long value
4245  * @v: pointer to atomic_long_t
4246  *
4247  * Atomically updates @v to (@v ^ @i) with relaxed ordering.
4248  *
4249  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_xor_relaxed() there.
4250  *
4251  * Return: The original value of @v.
4252  */
4253 static __always_inline long
4254 atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
4255 {
4256 	instrument_atomic_read_write(v, sizeof(*v));
4257 	return raw_atomic_long_fetch_xor_relaxed(i, v);
4258 }
4259 
4260 /**
4261  * atomic_long_xchg() - atomic exchange with full ordering
4262  * @v: pointer to atomic_long_t
4263  * @new: long value to assign
4264  *
4265  * Atomically updates @v to @new with full ordering.
4266  *
4267  * Unsafe to use in noinstr code; use raw_atomic_long_xchg() there.
4268  *
4269  * Return: The original value of @v.
4270  */
4271 static __always_inline long
4272 atomic_long_xchg(atomic_long_t *v, long new)
4273 {
4274 	kcsan_mb();
4275 	instrument_atomic_read_write(v, sizeof(*v));
4276 	return raw_atomic_long_xchg(v, new);
4277 }
4278 
4279 /**
4280  * atomic_long_xchg_acquire() - atomic exchange with acquire ordering
4281  * @v: pointer to atomic_long_t
4282  * @new: long value to assign
4283  *
4284  * Atomically updates @v to @new with acquire ordering.
4285  *
4286  * Unsafe to use in noinstr code; use raw_atomic_long_xchg_acquire() there.
4287  *
4288  * Return: The original value of @v.
4289  */
4290 static __always_inline long
4291 atomic_long_xchg_acquire(atomic_long_t *v, long new)
4292 {
4293 	instrument_atomic_read_write(v, sizeof(*v));
4294 	return raw_atomic_long_xchg_acquire(v, new);
4295 }
4296 
4297 /**
4298  * atomic_long_xchg_release() - atomic exchange with release ordering
4299  * @v: pointer to atomic_long_t
4300  * @new: long value to assign
4301  *
4302  * Atomically updates @v to @new with release ordering.
4303  *
4304  * Unsafe to use in noinstr code; use raw_atomic_long_xchg_release() there.
4305  *
4306  * Return: The original value of @v.
4307  */
4308 static __always_inline long
4309 atomic_long_xchg_release(atomic_long_t *v, long new)
4310 {
4311 	kcsan_release();
4312 	instrument_atomic_read_write(v, sizeof(*v));
4313 	return raw_atomic_long_xchg_release(v, new);
4314 }
4315 
4316 /**
4317  * atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
4318  * @v: pointer to atomic_long_t
4319  * @new: long value to assign
4320  *
4321  * Atomically updates @v to @new with relaxed ordering.
4322  *
4323  * Unsafe to use in noinstr code; use raw_atomic_long_xchg_relaxed() there.
4324  *
4325  * Return: The original value of @v.
4326  */
4327 static __always_inline long
4328 atomic_long_xchg_relaxed(atomic_long_t *v, long new)
4329 {
4330 	instrument_atomic_read_write(v, sizeof(*v));
4331 	return raw_atomic_long_xchg_relaxed(v, new);
4332 }
4333 
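/*
 * Editorial example, not part of the generated interface: atomic_long_xchg()
 * swaps in a new value and returns whatever was there, which makes it easy to
 * atomically drain a counter of pending work. The helper is a sketch only.
 */
static __always_inline long
example_drain_pending(atomic_long_t *pending)
{
	/* Reset the counter and return how much work had accumulated. */
	return atomic_long_xchg(pending, 0);
}
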
4334 /**
4335  * atomic_long_cmpxchg() - atomic compare and exchange with full ordering
4336  * @v: pointer to atomic_long_t
4337  * @old: long value to compare with
4338  * @new: long value to assign
4339  *
4340  * If (@v == @old), atomically updates @v to @new with full ordering.
4341  * Otherwise, @v is not modified and relaxed ordering is provided.
4342  *
4343  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg() there.
4344  *
4345  * Return: The original value of @v.
4346  */
4347 static __always_inline long
4348 atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
4349 {
4350 	kcsan_mb();
4351 	instrument_atomic_read_write(v, sizeof(*v));
4352 	return raw_atomic_long_cmpxchg(v, old, new);
4353 }
4354 
4355 /**
4356  * atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
4357  * @v: pointer to atomic_long_t
4358  * @old: long value to compare with
4359  * @new: long value to assign
4360  *
4361  * If (@v == @old), atomically updates @v to @new with acquire ordering.
4362  * Otherwise, @v is not modified and relaxed ordering is provided.
4363  *
4364  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_acquire() there.
4365  *
4366  * Return: The original value of @v.
4367  */
4368 static __always_inline long
4369 atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
4370 {
4371 	instrument_atomic_read_write(v, sizeof(*v));
4372 	return raw_atomic_long_cmpxchg_acquire(v, old, new);
4373 }
4374 
4375 /**
4376  * atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
4377  * @v: pointer to atomic_long_t
4378  * @old: long value to compare with
4379  * @new: long value to assign
4380  *
4381  * If (@v == @old), atomically updates @v to @new with release ordering.
4382  * Otherwise, @v is not modified and relaxed ordering is provided.
4383  *
4384  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_release() there.
4385  *
4386  * Return: The original value of @v.
4387  */
4388 static __always_inline long
4389 atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
4390 {
4391 	kcsan_release();
4392 	instrument_atomic_read_write(v, sizeof(*v));
4393 	return raw_atomic_long_cmpxchg_release(v, old, new);
4394 }
4395 
4396 /**
4397  * atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
4398  * @v: pointer to atomic_long_t
4399  * @old: long value to compare with
4400  * @new: long value to assign
4401  *
4402  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
4403  * Otherwise, @v is not modified and relaxed ordering is provided.
4404  *
4405  * Unsafe to use in noinstr code; use raw_atomic_long_cmpxchg_relaxed() there.
4406  *
4407  * Return: The original value of @v.
4408  */
4409 static __always_inline long
4410 atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
4411 {
4412 	instrument_atomic_read_write(v, sizeof(*v));
4413 	return raw_atomic_long_cmpxchg_relaxed(v, old, new);
4414 }
4415 
4416 /**
4417  * atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
4418  * @v: pointer to atomic_long_t
4419  * @old: pointer to long value to compare with
4420  * @new: long value to assign
4421  *
4422  * If (@v == @old), atomically updates @v to @new with full ordering.
4423  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4424  * and relaxed ordering is provided.
4425  *
4426  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg() there.
4427  *
4428  * Return: @true if the exchange occurred, @false otherwise.
4429  */
4430 static __always_inline bool
4431 atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
4432 {
4433 	kcsan_mb();
4434 	instrument_atomic_read_write(v, sizeof(*v));
4435 	instrument_atomic_read_write(old, sizeof(*old));
4436 	return raw_atomic_long_try_cmpxchg(v, old, new);
4437 }
4438 
4439 /**
4440  * atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
4441  * @v: pointer to atomic_long_t
4442  * @old: pointer to long value to compare with
4443  * @new: long value to assign
4444  *
4445  * If (@v == @old), atomically updates @v to @new with acquire ordering.
4446  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4447  * and relaxed ordering is provided.
4448  *
4449  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_acquire() there.
4450  *
4451  * Return: @true if the exchange occurred, @false otherwise.
4452  */
4453 static __always_inline bool
4454 atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
4455 {
4456 	instrument_atomic_read_write(v, sizeof(*v));
4457 	instrument_atomic_read_write(old, sizeof(*old));
4458 	return raw_atomic_long_try_cmpxchg_acquire(v, old, new);
4459 }
4460 
4461 /**
4462  * atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
4463  * @v: pointer to atomic_long_t
4464  * @old: pointer to long value to compare with
4465  * @new: long value to assign
4466  *
4467  * If (@v == @old), atomically updates @v to @new with release ordering.
4468  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4469  * and relaxed ordering is provided.
4470  *
4471  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_release() there.
4472  *
4473  * Return: @true if the exchange occurred, @false otherwise.
4474  */
4475 static __always_inline bool
4476 atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
4477 {
4478 	kcsan_release();
4479 	instrument_atomic_read_write(v, sizeof(*v));
4480 	instrument_atomic_read_write(old, sizeof(*old));
4481 	return raw_atomic_long_try_cmpxchg_release(v, old, new);
4482 }
4483 
4484 /**
4485  * atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
4486  * @v: pointer to atomic_long_t
4487  * @old: pointer to long value to compare with
4488  * @new: long value to assign
4489  *
4490  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
4491  * Otherwise, @v is not modified, @old is updated to the current value of @v,
4492  * and relaxed ordering is provided.
4493  *
4494  * Unsafe to use in noinstr code; use raw_atomic_long_try_cmpxchg_relaxed() there.
4495  *
4496  * Return: @true if the exchange occurred, @false otherwise.
4497  */
4498 static __always_inline bool
4499 atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
4500 {
4501 	instrument_atomic_read_write(v, sizeof(*v));
4502 	instrument_atomic_read_write(old, sizeof(*old));
4503 	return raw_atomic_long_try_cmpxchg_relaxed(v, old, new);
4504 }
4505 
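/*
 * Editorial example, not part of the generated interface: the usual shape of
 * an atomic_long_try_cmpxchg() user is a read-modify-write loop; on failure
 * @old has already been refreshed with the current value, so no extra load is
 * needed before retrying. The saturation limit below is hypothetical.
 */
static __always_inline bool
example_inc_below(atomic_long_t *v, long limit)
{
	long old = atomic_long_read(v);

	do {
		if (old >= limit)
			return false;
	} while (!atomic_long_try_cmpxchg(v, &old, old + 1));

	return true;
}
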
4506 /**
4507  * atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
4508  * @i: long value to subtract
4509  * @v: pointer to atomic_long_t
4510  *
4511  * Atomically updates @v to (@v - @i) with full ordering.
4512  *
4513  * Unsafe to use in noinstr code; use raw_atomic_long_sub_and_test() there.
4514  *
4515  * Return: @true if the resulting value of @v is zero, @false otherwise.
4516  */
4517 static __always_inline bool
4518 atomic_long_sub_and_test(long i, atomic_long_t *v)
4519 {
4520 	kcsan_mb();
4521 	instrument_atomic_read_write(v, sizeof(*v));
4522 	return raw_atomic_long_sub_and_test(i, v);
4523 }
4524 
4525 /**
4526  * atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
4527  * @v: pointer to atomic_long_t
4528  *
4529  * Atomically updates @v to (@v - 1) with full ordering.
4530  *
4531  * Unsafe to use in noinstr code; use raw_atomic_long_dec_and_test() there.
4532  *
4533  * Return: @true if the resulting value of @v is zero, @false otherwise.
4534  */
4535 static __always_inline bool
4536 atomic_long_dec_and_test(atomic_long_t *v)
4537 {
4538 	kcsan_mb();
4539 	instrument_atomic_read_write(v, sizeof(*v));
4540 	return raw_atomic_long_dec_and_test(v);
4541 }
4542 
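/*
 * Editorial example, not part of the generated interface:
 * atomic_long_dec_and_test() is the classic "drop a reference and detect the
 * last put" building block. The release callback is hypothetical.
 */
static __always_inline void
example_put_ref(atomic_long_t *refs, void (*release)(atomic_long_t *refs))
{
	if (atomic_long_dec_and_test(refs))
		release(refs);
}
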
4543 /**
4544  * atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
4545  * @v: pointer to atomic_long_t
4546  *
4547  * Atomically updates @v to (@v + 1) with full ordering.
4548  *
4549  * Unsafe to use in noinstr code; use raw_atomic_long_inc_and_test() there.
4550  *
4551  * Return: @true if the resulting value of @v is zero, @false otherwise.
4552  */
4553 static __always_inline bool
4554 atomic_long_inc_and_test(atomic_long_t *v)
4555 {
4556 	kcsan_mb();
4557 	instrument_atomic_read_write(v, sizeof(*v));
4558 	return raw_atomic_long_inc_and_test(v);
4559 }
4560 
4561 /**
4562  * atomic_long_add_negative() - atomic add and test if negative with full ordering
4563  * @i: long value to add
4564  * @v: pointer to atomic_long_t
4565  *
4566  * Atomically updates @v to (@v + @i) with full ordering.
4567  *
4568  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative() there.
4569  *
4570  * Return: @true if the resulting value of @v is negative, @false otherwise.
4571  */
4572 static __always_inline bool
4573 atomic_long_add_negative(long i, atomic_long_t *v)
4574 {
4575 	kcsan_mb();
4576 	instrument_atomic_read_write(v, sizeof(*v));
4577 	return raw_atomic_long_add_negative(i, v);
4578 }
4579 
4580 /**
4581  * atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
4582  * @i: long value to add
4583  * @v: pointer to atomic_long_t
4584  *
4585  * Atomically updates @v to (@v + @i) with acquire ordering.
4586  *
4587  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_acquire() there.
4588  *
4589  * Return: @true if the resulting value of @v is negative, @false otherwise.
4590  */
4591 static __always_inline bool
4592 atomic_long_add_negative_acquire(long i, atomic_long_t *v)
4593 {
4594 	instrument_atomic_read_write(v, sizeof(*v));
4595 	return raw_atomic_long_add_negative_acquire(i, v);
4596 }
4597 
4598 /**
4599  * atomic_long_add_negative_release() - atomic add and test if negative with release ordering
4600  * @i: long value to add
4601  * @v: pointer to atomic_long_t
4602  *
4603  * Atomically updates @v to (@v + @i) with release ordering.
4604  *
4605  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_release() there.
4606  *
4607  * Return: @true if the resulting value of @v is negative, @false otherwise.
4608  */
4609 static __always_inline bool
4610 atomic_long_add_negative_release(long i, atomic_long_t *v)
4611 {
4612 	kcsan_release();
4613 	instrument_atomic_read_write(v, sizeof(*v));
4614 	return raw_atomic_long_add_negative_release(i, v);
4615 }
4616 
4617 /**
4618  * atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
4619  * @i: long value to add
4620  * @v: pointer to atomic_long_t
4621  *
4622  * Atomically updates @v to (@v + @i) with relaxed ordering.
4623  *
4624  * Unsafe to use in noinstr code; use raw_atomic_long_add_negative_relaxed() there.
4625  *
4626  * Return: @true if the resulting value of @v is negative, @false otherwise.
4627  */
4628 static __always_inline bool
4629 atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
4630 {
4631 	instrument_atomic_read_write(v, sizeof(*v));
4632 	return raw_atomic_long_add_negative_relaxed(i, v);
4633 }
4634 
4635 /**
4636  * atomic_long_fetch_add_unless() - atomic add unless value with full ordering
4637  * @v: pointer to atomic_long_t
4638  * @a: long value to add
4639  * @u: long value to compare with
4640  *
4641  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4642  * Otherwise, @v is not modified and relaxed ordering is provided.
4643  *
4644  * Unsafe to use in noinstr code; use raw_atomic_long_fetch_add_unless() there.
4645  *
4646  * Return: The original value of @v.
4647  */
4648 static __always_inline long
4649 atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
4650 {
4651 	kcsan_mb();
4652 	instrument_atomic_read_write(v, sizeof(*v));
4653 	return raw_atomic_long_fetch_add_unless(v, a, u);
4654 }
4655 
4656 /**
4657  * atomic_long_add_unless() - atomic add unless value with full ordering
4658  * @v: pointer to atomic_long_t
4659  * @a: long value to add
4660  * @u: long value to compare with
4661  *
4662  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4663  * Otherwise, @v is not modified and relaxed ordering is provided.
4664  *
4665  * Unsafe to use in noinstr code; use raw_atomic_long_add_unless() there.
4666  *
4667  * Return: @true if @v was updated, @false otherwise.
4668  */
4669 static __always_inline bool
4670 atomic_long_add_unless(atomic_long_t *v, long a, long u)
4671 {
4672 	kcsan_mb();
4673 	instrument_atomic_read_write(v, sizeof(*v));
4674 	return raw_atomic_long_add_unless(v, a, u);
4675 }
4676 
4677 /**
4678  * atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
4679  * @v: pointer to atomic_long_t
4680  *
4681  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
4682  * Otherwise, @v is not modified and relaxed ordering is provided.
4683  *
4684  * Unsafe to use in noinstr code; use raw_atomic_long_inc_not_zero() there.
4685  *
4686  * Return: @true if @v was updated, @false otherwise.
4687  */
4688 static __always_inline bool
4689 atomic_long_inc_not_zero(atomic_long_t *v)
4690 {
4691 	kcsan_mb();
4692 	instrument_atomic_read_write(v, sizeof(*v));
4693 	return raw_atomic_long_inc_not_zero(v);
4694 }
4695 
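/*
 * Editorial example, not part of the generated interface:
 * atomic_long_inc_not_zero() only takes a reference while the count is still
 * live, guarding against resurrecting an object that is already being torn
 * down. The helper name is made up.
 */
static __always_inline bool
example_try_get_ref(atomic_long_t *refs)
{
	/* @false means the count already hit zero; do not use the object. */
	return atomic_long_inc_not_zero(refs);
}
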
4696 /**
4697  * atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
4698  * @v: pointer to atomic_long_t
4699  *
4700  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
4701  * Otherwise, @v is not modified and relaxed ordering is provided.
4702  *
4703  * Unsafe to use in noinstr code; use raw_atomic_long_inc_unless_negative() there.
4704  *
4705  * Return: @true if @v was updated, @false otherwise.
4706  */
4707 static __always_inline bool
4708 atomic_long_inc_unless_negative(atomic_long_t *v)
4709 {
4710 	kcsan_mb();
4711 	instrument_atomic_read_write(v, sizeof(*v));
4712 	return raw_atomic_long_inc_unless_negative(v);
4713 }
4714 
4715 /**
4716  * atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
4717  * @v: pointer to atomic_long_t
4718  *
4719  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
4720  * Otherwise, @v is not modified and relaxed ordering is provided.
4721  *
4722  * Unsafe to use in noinstr code; use raw_atomic_long_dec_unless_positive() there.
4723  *
4724  * Return: @true if @v was updated, @false otherwise.
4725  */
4726 static __always_inline bool
4727 atomic_long_dec_unless_positive(atomic_long_t *v)
4728 {
4729 	kcsan_mb();
4730 	instrument_atomic_read_write(v, sizeof(*v));
4731 	return raw_atomic_long_dec_unless_positive(v);
4732 }
4733 
4734 /**
4735  * atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
4736  * @v: pointer to atomic_long_t
4737  *
4738  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
4739  * Otherwise, @v is not modified and relaxed ordering is provided.
4740  *
4741  * Unsafe to use in noinstr code; use raw_atomic_long_dec_if_positive() there.
4742  *
4743  * Return: The old value of (@v - 1), regardless of whether @v was updated.
4744  */
4745 static __always_inline long
4746 atomic_long_dec_if_positive(atomic_long_t *v)
4747 {
4748 	kcsan_mb();
4749 	instrument_atomic_read_write(v, sizeof(*v));
4750 	return raw_atomic_long_dec_if_positive(v);
4751 }
4752 
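/*
 * Editorial example, not part of the generated interface: because
 * atomic_long_dec_if_positive() never drives @v below zero, it can model a
 * pool of credits; the return value is negative exactly when no credit was
 * available. The helper name is hypothetical.
 */
static __always_inline bool
example_take_credit(atomic_long_t *credits)
{
	return atomic_long_dec_if_positive(credits) >= 0;
}
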
4753 #define xchg(ptr, ...) \
4754 ({ \
4755 	typeof(ptr) __ai_ptr = (ptr); \
4756 	kcsan_mb(); \
4757 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4758 	raw_xchg(__ai_ptr, __VA_ARGS__); \
4759 })
4760 
4761 #define xchg_acquire(ptr, ...) \
4762 ({ \
4763 	typeof(ptr) __ai_ptr = (ptr); \
4764 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4765 	raw_xchg_acquire(__ai_ptr, __VA_ARGS__); \
4766 })
4767 
4768 #define xchg_release(ptr, ...) \
4769 ({ \
4770 	typeof(ptr) __ai_ptr = (ptr); \
4771 	kcsan_release(); \
4772 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4773 	raw_xchg_release(__ai_ptr, __VA_ARGS__); \
4774 })
4775 
4776 #define xchg_relaxed(ptr, ...) \
4777 ({ \
4778 	typeof(ptr) __ai_ptr = (ptr); \
4779 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4780 	raw_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
4781 })
4782 
4783 #define cmpxchg(ptr, ...) \
4784 ({ \
4785 	typeof(ptr) __ai_ptr = (ptr); \
4786 	kcsan_mb(); \
4787 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4788 	raw_cmpxchg(__ai_ptr, __VA_ARGS__); \
4789 })
4790 
4791 #define cmpxchg_acquire(ptr, ...) \
4792 ({ \
4793 	typeof(ptr) __ai_ptr = (ptr); \
4794 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4795 	raw_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
4796 })
4797 
4798 #define cmpxchg_release(ptr, ...) \
4799 ({ \
4800 	typeof(ptr) __ai_ptr = (ptr); \
4801 	kcsan_release(); \
4802 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4803 	raw_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
4804 })
4805 
4806 #define cmpxchg_relaxed(ptr, ...) \
4807 ({ \
4808 	typeof(ptr) __ai_ptr = (ptr); \
4809 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4810 	raw_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
4811 })
4812 
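/*
 * Editorial example, not part of the generated interface: the cmpxchg() macro
 * also operates on plain scalar lvalues and returns the value observed at
 * *ptr, so success is detected by comparing against the expected old value.
 * The 0 = idle / 1 = busy encoding below is hypothetical.
 */
static __always_inline bool
example_claim_slot(unsigned long *state)
{
	return cmpxchg(state, 0UL, 1UL) == 0UL;
}
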
4813 #define cmpxchg64(ptr, ...) \
4814 ({ \
4815 	typeof(ptr) __ai_ptr = (ptr); \
4816 	kcsan_mb(); \
4817 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4818 	raw_cmpxchg64(__ai_ptr, __VA_ARGS__); \
4819 })
4820 
4821 #define cmpxchg64_acquire(ptr, ...) \
4822 ({ \
4823 	typeof(ptr) __ai_ptr = (ptr); \
4824 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4825 	raw_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
4826 })
4827 
4828 #define cmpxchg64_release(ptr, ...) \
4829 ({ \
4830 	typeof(ptr) __ai_ptr = (ptr); \
4831 	kcsan_release(); \
4832 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4833 	raw_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
4834 })
4835 
4836 #define cmpxchg64_relaxed(ptr, ...) \
4837 ({ \
4838 	typeof(ptr) __ai_ptr = (ptr); \
4839 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4840 	raw_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
4841 })
4842 
4843 #define cmpxchg128(ptr, ...) \
4844 ({ \
4845 	typeof(ptr) __ai_ptr = (ptr); \
4846 	kcsan_mb(); \
4847 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4848 	raw_cmpxchg128(__ai_ptr, __VA_ARGS__); \
4849 })
4850 
4851 #define cmpxchg128_acquire(ptr, ...) \
4852 ({ \
4853 	typeof(ptr) __ai_ptr = (ptr); \
4854 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4855 	raw_cmpxchg128_acquire(__ai_ptr, __VA_ARGS__); \
4856 })
4857 
4858 #define cmpxchg128_release(ptr, ...) \
4859 ({ \
4860 	typeof(ptr) __ai_ptr = (ptr); \
4861 	kcsan_release(); \
4862 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4863 	raw_cmpxchg128_release(__ai_ptr, __VA_ARGS__); \
4864 })
4865 
4866 #define cmpxchg128_relaxed(ptr, ...) \
4867 ({ \
4868 	typeof(ptr) __ai_ptr = (ptr); \
4869 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4870 	raw_cmpxchg128_relaxed(__ai_ptr, __VA_ARGS__); \
4871 })
4872 
4873 #define try_cmpxchg(ptr, oldp, ...) \
4874 ({ \
4875 	typeof(ptr) __ai_ptr = (ptr); \
4876 	typeof(oldp) __ai_oldp = (oldp); \
4877 	kcsan_mb(); \
4878 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4879 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4880 	raw_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4881 })
4882 
4883 #define try_cmpxchg_acquire(ptr, oldp, ...) \
4884 ({ \
4885 	typeof(ptr) __ai_ptr = (ptr); \
4886 	typeof(oldp) __ai_oldp = (oldp); \
4887 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4888 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4889 	raw_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4890 })
4891 
4892 #define try_cmpxchg_release(ptr, oldp, ...) \
4893 ({ \
4894 	typeof(ptr) __ai_ptr = (ptr); \
4895 	typeof(oldp) __ai_oldp = (oldp); \
4896 	kcsan_release(); \
4897 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4898 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4899 	raw_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4900 })
4901 
4902 #define try_cmpxchg_relaxed(ptr, oldp, ...) \
4903 ({ \
4904 	typeof(ptr) __ai_ptr = (ptr); \
4905 	typeof(oldp) __ai_oldp = (oldp); \
4906 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4907 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4908 	raw_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4909 })
4910 
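/*
 * Editorial example, not part of the generated interface: try_cmpxchg()
 * mirrors the atomic_*_try_cmpxchg() helpers for plain scalars, returning
 * @true on success and refreshing *@oldp on failure, which keeps the retry
 * loop free of redundant loads. The doubling cap below is hypothetical.
 */
static __always_inline bool
example_double_up_to(unsigned long *val, unsigned long cap)
{
	unsigned long old = READ_ONCE(*val);

	do {
		if (old > cap / 2)
			return false;
	} while (!try_cmpxchg(val, &old, old * 2));

	return true;
}
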
4911 #define try_cmpxchg64(ptr, oldp, ...) \
4912 ({ \
4913 	typeof(ptr) __ai_ptr = (ptr); \
4914 	typeof(oldp) __ai_oldp = (oldp); \
4915 	kcsan_mb(); \
4916 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4917 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4918 	raw_try_cmpxchg64(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4919 })
4920 
4921 #define try_cmpxchg64_acquire(ptr, oldp, ...) \
4922 ({ \
4923 	typeof(ptr) __ai_ptr = (ptr); \
4924 	typeof(oldp) __ai_oldp = (oldp); \
4925 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4926 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4927 	raw_try_cmpxchg64_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4928 })
4929 
4930 #define try_cmpxchg64_release(ptr, oldp, ...) \
4931 ({ \
4932 	typeof(ptr) __ai_ptr = (ptr); \
4933 	typeof(oldp) __ai_oldp = (oldp); \
4934 	kcsan_release(); \
4935 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4936 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4937 	raw_try_cmpxchg64_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4938 })
4939 
4940 #define try_cmpxchg64_relaxed(ptr, oldp, ...) \
4941 ({ \
4942 	typeof(ptr) __ai_ptr = (ptr); \
4943 	typeof(oldp) __ai_oldp = (oldp); \
4944 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4945 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4946 	raw_try_cmpxchg64_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4947 })
4948 
4949 #define try_cmpxchg128(ptr, oldp, ...) \
4950 ({ \
4951 	typeof(ptr) __ai_ptr = (ptr); \
4952 	typeof(oldp) __ai_oldp = (oldp); \
4953 	kcsan_mb(); \
4954 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4955 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4956 	raw_try_cmpxchg128(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4957 })
4958 
4959 #define try_cmpxchg128_acquire(ptr, oldp, ...) \
4960 ({ \
4961 	typeof(ptr) __ai_ptr = (ptr); \
4962 	typeof(oldp) __ai_oldp = (oldp); \
4963 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4964 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4965 	raw_try_cmpxchg128_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4966 })
4967 
4968 #define try_cmpxchg128_release(ptr, oldp, ...) \
4969 ({ \
4970 	typeof(ptr) __ai_ptr = (ptr); \
4971 	typeof(oldp) __ai_oldp = (oldp); \
4972 	kcsan_release(); \
4973 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4974 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4975 	raw_try_cmpxchg128_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4976 })
4977 
4978 #define try_cmpxchg128_relaxed(ptr, oldp, ...) \
4979 ({ \
4980 	typeof(ptr) __ai_ptr = (ptr); \
4981 	typeof(oldp) __ai_oldp = (oldp); \
4982 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4983 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
4984 	raw_try_cmpxchg128_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
4985 })
4986 
4987 #define cmpxchg_local(ptr, ...) \
4988 ({ \
4989 	typeof(ptr) __ai_ptr = (ptr); \
4990 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4991 	raw_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
4992 })
4993 
4994 #define cmpxchg64_local(ptr, ...) \
4995 ({ \
4996 	typeof(ptr) __ai_ptr = (ptr); \
4997 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
4998 	raw_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
4999 })
5000 
5001 #define cmpxchg128_local(ptr, ...) \
5002 ({ \
5003 	typeof(ptr) __ai_ptr = (ptr); \
5004 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
5005 	raw_cmpxchg128_local(__ai_ptr, __VA_ARGS__); \
5006 })
5007 
5008 #define sync_cmpxchg(ptr, ...) \
5009 ({ \
5010 	typeof(ptr) __ai_ptr = (ptr); \
5011 	kcsan_mb(); \
5012 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
5013 	raw_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
5014 })
5015 
5016 #define try_cmpxchg_local(ptr, oldp, ...) \
5017 ({ \
5018 	typeof(ptr) __ai_ptr = (ptr); \
5019 	typeof(oldp) __ai_oldp = (oldp); \
5020 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
5021 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
5022 	raw_try_cmpxchg_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
5023 })
5024 
5025 #define try_cmpxchg64_local(ptr, oldp, ...) \
5026 ({ \
5027 	typeof(ptr) __ai_ptr = (ptr); \
5028 	typeof(oldp) __ai_oldp = (oldp); \
5029 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
5030 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
5031 	raw_try_cmpxchg64_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
5032 })
5033 
5034 #define try_cmpxchg128_local(ptr, oldp, ...) \
5035 ({ \
5036 	typeof(ptr) __ai_ptr = (ptr); \
5037 	typeof(oldp) __ai_oldp = (oldp); \
5038 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
5039 	instrument_read_write(__ai_oldp, sizeof(*__ai_oldp)); \
5040 	raw_try_cmpxchg128_local(__ai_ptr, __ai_oldp, __VA_ARGS__); \
5041 })
5042 
5043 #define sync_try_cmpxchg(ptr, ...) \
5044 ({ \
5045 	typeof(ptr) __ai_ptr = (ptr); \
5046 	kcsan_mb(); \
5047 	instrument_atomic_read_write(__ai_ptr, sizeof(*__ai_ptr)); \
5048 	raw_sync_try_cmpxchg(__ai_ptr, __VA_ARGS__); \
5049 })
5050 
5051 
5052 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
5053 // ce5b65e0f1f8a276268b667194581d24bed219d4
5054