1 // SPDX-License-Identifier: GPL-2.0
2
3 // Generated by scripts/atomic/gen-atomic-long.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5
6 #ifndef _LINUX_ATOMIC_LONG_H
7 #define _LINUX_ATOMIC_LONG_H
8
9 #include <linux/compiler.h>
10 #include <asm/types.h>
11
12 #ifdef CONFIG_64BIT
13 typedef atomic64_t atomic_long_t;
14 #define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
15 #define atomic_long_cond_read_acquire atomic64_cond_read_acquire
16 #define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
17 #else
18 typedef atomic_t atomic_long_t;
19 #define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
20 #define atomic_long_cond_read_acquire atomic_cond_read_acquire
21 #define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
22 #endif
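/*
 * Illustrative usage (a sketch, not part of the generated API): atomic_long_t
 * follows the kernel's native word size, so it is backed by atomic64_t on
 * 64-bit kernels and by atomic_t on 32-bit kernels. A hypothetical counter
 * could be declared and used like this:
 *
 *	static atomic_long_t example_nr_events = ATOMIC_LONG_INIT(0);
 *
 *	raw_atomic_long_inc(&example_nr_events);
 *	pr_debug("events: %ld\n", raw_atomic_long_read(&example_nr_events));
 *
 * "example_nr_events" is a made-up name used only for illustration.
 */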
23
24 /**
25 * raw_atomic_long_read() - atomic load with relaxed ordering
26 * @v: pointer to atomic_long_t
27 *
28 * Atomically loads the value of @v with relaxed ordering.
29 *
30 * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
31 *
32 * Return: The value loaded from @v.
33 */
34 static __always_inline long
35 raw_atomic_long_read(const atomic_long_t *v)
36 {
37 #ifdef CONFIG_64BIT
38 return raw_atomic64_read(v);
39 #else
40 return raw_atomic_read(v);
41 #endif
42 }
43
44 /**
45 * raw_atomic_long_read_acquire() - atomic load with acquire ordering
46 * @v: pointer to atomic_long_t
47 *
48 * Atomically loads the value of @v with acquire ordering.
49 *
50 * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
51 *
52 * Return: The value loaded from @v.
53 */
54 static __always_inline long
55 raw_atomic_long_read_acquire(const atomic_long_t *v)
56 {
57 #ifdef CONFIG_64BIT
58 return raw_atomic64_read_acquire(v);
59 #else
60 return raw_atomic_read_acquire(v);
61 #endif
62 }
63
64 /**
65 * raw_atomic_long_set() - atomic set with relaxed ordering
66 * @v: pointer to atomic_long_t
67 * @i: long value to assign
68 *
69 * Atomically sets @v to @i with relaxed ordering.
70 *
71 * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
72 *
73 * Return: Nothing.
74 */
75 static __always_inline void
76 raw_atomic_long_set(atomic_long_t *v, long i)
77 {
78 #ifdef CONFIG_64BIT
79 raw_atomic64_set(v, i);
80 #else
81 raw_atomic_set(v, i);
82 #endif
83 }
84
85 /**
86 * raw_atomic_long_set_release() - atomic set with release ordering
87 * @v: pointer to atomic_long_t
88 * @i: long value to assign
89 *
90 * Atomically sets @v to @i with release ordering.
91 *
92 * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
93 *
94 * Return: Nothing.
95 */
96 static __always_inline void
97 raw_atomic_long_set_release(atomic_long_t *v, long i)
98 {
99 #ifdef CONFIG_64BIT
100 raw_atomic64_set_release(v, i);
101 #else
102 raw_atomic_set_release(v, i);
103 #endif
104 }
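/*
 * Illustrative acquire/release pairing (a sketch with hypothetical names): a
 * writer that stores a flag with set_release() publishes all of its earlier
 * writes to any reader that observes the new flag value with read_acquire().
 *
 *	// writer
 *	example_data = 42;
 *	raw_atomic_long_set_release(&example_flag, 1);
 *
 *	// reader
 *	if (raw_atomic_long_read_acquire(&example_flag))
 *		do_something(example_data);	// guaranteed to observe 42
 */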
105
106 /**
107 * raw_atomic_long_add() - atomic add with relaxed ordering
108 * @i: long value to add
109 * @v: pointer to atomic_long_t
110 *
111 * Atomically updates @v to (@v + @i) with relaxed ordering.
112 *
113 * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
114 *
115 * Return: Nothing.
116 */
117 static __always_inline void
118 raw_atomic_long_add(long i, atomic_long_t *v)
119 {
120 #ifdef CONFIG_64BIT
121 raw_atomic64_add(i, v);
122 #else
123 raw_atomic_add(i, v);
124 #endif
125 }
126
127 /**
128 * raw_atomic_long_add_return() - atomic add with full ordering
129 * @i: long value to add
130 * @v: pointer to atomic_long_t
131 *
132 * Atomically updates @v to (@v + @i) with full ordering.
133 *
134 * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
135 *
136 * Return: The updated value of @v.
137 */
138 static __always_inline long
139 raw_atomic_long_add_return(long i, atomic_long_t *v)
140 {
141 #ifdef CONFIG_64BIT
142 return raw_atomic64_add_return(i, v);
143 #else
144 return raw_atomic_add_return(i, v);
145 #endif
146 }
147
148 /**
149 * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
150 * @i: long value to add
151 * @v: pointer to atomic_long_t
152 *
153 * Atomically updates @v to (@v + @i) with acquire ordering.
154 *
155 * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
156 *
157 * Return: The updated value of @v.
158 */
159 static __always_inline long
160 raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
161 {
162 #ifdef CONFIG_64BIT
163 return raw_atomic64_add_return_acquire(i, v);
164 #else
165 return raw_atomic_add_return_acquire(i, v);
166 #endif
167 }
168
169 /**
170 * raw_atomic_long_add_return_release() - atomic add with release ordering
171 * @i: long value to add
172 * @v: pointer to atomic_long_t
173 *
174 * Atomically updates @v to (@v + @i) with release ordering.
175 *
176 * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
177 *
178 * Return: The updated value of @v.
179 */
180 static __always_inline long
181 raw_atomic_long_add_return_release(long i, atomic_long_t *v)
182 {
183 #ifdef CONFIG_64BIT
184 return raw_atomic64_add_return_release(i, v);
185 #else
186 return raw_atomic_add_return_release(i, v);
187 #endif
188 }
189
190 /**
191 * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
192 * @i: long value to add
193 * @v: pointer to atomic_long_t
194 *
195 * Atomically updates @v to (@v + @i) with relaxed ordering.
196 *
197 * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
198 *
199 * Return: The updated value of @v.
200 */
201 static __always_inline long
202 raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
203 {
204 #ifdef CONFIG_64BIT
205 return raw_atomic64_add_return_relaxed(i, v);
206 #else
207 return raw_atomic_add_return_relaxed(i, v);
208 #endif
209 }
210
211 /**
212 * raw_atomic_long_fetch_add() - atomic add with full ordering
213 * @i: long value to add
214 * @v: pointer to atomic_long_t
215 *
216 * Atomically updates @v to (@v + @i) with full ordering.
217 *
218 * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
219 *
220 * Return: The original value of @v.
221 */
222 static __always_inline long
223 raw_atomic_long_fetch_add(long i, atomic_long_t *v)
224 {
225 #ifdef CONFIG_64BIT
226 return raw_atomic64_fetch_add(i, v);
227 #else
228 return raw_atomic_fetch_add(i, v);
229 #endif
230 }
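/*
 * Illustrative contrast (a sketch; assumes @v starts at 5): the *_return
 * operations return the updated value, whereas the fetch_* operations return
 * the value @v held before the update.
 *
 *	atomic_long_t v = ATOMIC_LONG_INIT(5);
 *	long new_val = raw_atomic_long_add_return(3, &v);	// new_val == 8,  v == 8
 *	long old_val = raw_atomic_long_fetch_add(3, &v);	// old_val == 8,  v == 11
 */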
231
232 /**
233 * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
234 * @i: long value to add
235 * @v: pointer to atomic_long_t
236 *
237 * Atomically updates @v to (@v + @i) with acquire ordering.
238 *
239 * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
240 *
241 * Return: The original value of @v.
242 */
243 static __always_inline long
244 raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
245 {
246 #ifdef CONFIG_64BIT
247 return raw_atomic64_fetch_add_acquire(i, v);
248 #else
249 return raw_atomic_fetch_add_acquire(i, v);
250 #endif
251 }
252
253 /**
254 * raw_atomic_long_fetch_add_release() - atomic add with release ordering
255 * @i: long value to add
256 * @v: pointer to atomic_long_t
257 *
258 * Atomically updates @v to (@v + @i) with release ordering.
259 *
260 * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
261 *
262 * Return: The original value of @v.
263 */
264 static __always_inline long
265 raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
266 {
267 #ifdef CONFIG_64BIT
268 return raw_atomic64_fetch_add_release(i, v);
269 #else
270 return raw_atomic_fetch_add_release(i, v);
271 #endif
272 }
273
274 /**
275 * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
276 * @i: long value to add
277 * @v: pointer to atomic_long_t
278 *
279 * Atomically updates @v to (@v + @i) with relaxed ordering.
280 *
281 * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
282 *
283 * Return: The original value of @v.
284 */
285 static __always_inline long
286 raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
287 {
288 #ifdef CONFIG_64BIT
289 return raw_atomic64_fetch_add_relaxed(i, v);
290 #else
291 return raw_atomic_fetch_add_relaxed(i, v);
292 #endif
293 }
294
295 /**
296 * raw_atomic_long_sub() - atomic subtract with relaxed ordering
297 * @i: long value to subtract
298 * @v: pointer to atomic_long_t
299 *
300 * Atomically updates @v to (@v - @i) with relaxed ordering.
301 *
302 * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
303 *
304 * Return: Nothing.
305 */
306 static __always_inline void
307 raw_atomic_long_sub(long i, atomic_long_t *v)
308 {
309 #ifdef CONFIG_64BIT
310 raw_atomic64_sub(i, v);
311 #else
312 raw_atomic_sub(i, v);
313 #endif
314 }
315
316 /**
317 * raw_atomic_long_sub_return() - atomic subtract with full ordering
318 * @i: long value to subtract
319 * @v: pointer to atomic_long_t
320 *
321 * Atomically updates @v to (@v - @i) with full ordering.
322 *
323 * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
324 *
325 * Return: The updated value of @v.
326 */
327 static __always_inline long
328 raw_atomic_long_sub_return(long i, atomic_long_t *v)
329 {
330 #ifdef CONFIG_64BIT
331 return raw_atomic64_sub_return(i, v);
332 #else
333 return raw_atomic_sub_return(i, v);
334 #endif
335 }
336
337 /**
338 * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
339 * @i: long value to subtract
340 * @v: pointer to atomic_long_t
341 *
342 * Atomically updates @v to (@v - @i) with acquire ordering.
343 *
344 * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
345 *
346 * Return: The updated value of @v.
347 */
348 static __always_inline long
349 raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
350 {
351 #ifdef CONFIG_64BIT
352 return raw_atomic64_sub_return_acquire(i, v);
353 #else
354 return raw_atomic_sub_return_acquire(i, v);
355 #endif
356 }
357
358 /**
359 * raw_atomic_long_sub_return_release() - atomic subtract with release ordering
360 * @i: long value to subtract
361 * @v: pointer to atomic_long_t
362 *
363 * Atomically updates @v to (@v - @i) with release ordering.
364 *
365 * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
366 *
367 * Return: The updated value of @v.
368 */
369 static __always_inline long
370 raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
371 {
372 #ifdef CONFIG_64BIT
373 return raw_atomic64_sub_return_release(i, v);
374 #else
375 return raw_atomic_sub_return_release(i, v);
376 #endif
377 }
378
379 /**
380 * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
381 * @i: long value to subtract
382 * @v: pointer to atomic_long_t
383 *
384 * Atomically updates @v to (@v - @i) with relaxed ordering.
385 *
386 * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
387 *
388 * Return: The updated value of @v.
389 */
390 static __always_inline long
391 raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
392 {
393 #ifdef CONFIG_64BIT
394 return raw_atomic64_sub_return_relaxed(i, v);
395 #else
396 return raw_atomic_sub_return_relaxed(i, v);
397 #endif
398 }
399
400 /**
401 * raw_atomic_long_fetch_sub() - atomic subtract with full ordering
402 * @i: long value to subtract
403 * @v: pointer to atomic_long_t
404 *
405 * Atomically updates @v to (@v - @i) with full ordering.
406 *
407 * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
408 *
409 * Return: The original value of @v.
410 */
411 static __always_inline long
412 raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
413 {
414 #ifdef CONFIG_64BIT
415 return raw_atomic64_fetch_sub(i, v);
416 #else
417 return raw_atomic_fetch_sub(i, v);
418 #endif
419 }
420
421 /**
422 * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
423 * @i: long value to subtract
424 * @v: pointer to atomic_long_t
425 *
426 * Atomically updates @v to (@v - @i) with acquire ordering.
427 *
428 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
429 *
430 * Return: The original value of @v.
431 */
432 static __always_inline long
433 raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
434 {
435 #ifdef CONFIG_64BIT
436 return raw_atomic64_fetch_sub_acquire(i, v);
437 #else
438 return raw_atomic_fetch_sub_acquire(i, v);
439 #endif
440 }
441
442 /**
443 * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
444 * @i: long value to subtract
445 * @v: pointer to atomic_long_t
446 *
447 * Atomically updates @v to (@v - @i) with release ordering.
448 *
449 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
450 *
451 * Return: The original value of @v.
452 */
453 static __always_inline long
454 raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
455 {
456 #ifdef CONFIG_64BIT
457 return raw_atomic64_fetch_sub_release(i, v);
458 #else
459 return raw_atomic_fetch_sub_release(i, v);
460 #endif
461 }
462
463 /**
464 * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
465 * @i: long value to subtract
466 * @v: pointer to atomic_long_t
467 *
468 * Atomically updates @v to (@v - @i) with relaxed ordering.
469 *
470 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
471 *
472 * Return: The original value of @v.
473 */
474 static __always_inline long
475 raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
476 {
477 #ifdef CONFIG_64BIT
478 return raw_atomic64_fetch_sub_relaxed(i, v);
479 #else
480 return raw_atomic_fetch_sub_relaxed(i, v);
481 #endif
482 }
483
484 /**
485 * raw_atomic_long_inc() - atomic increment with relaxed ordering
486 * @v: pointer to atomic_long_t
487 *
488 * Atomically updates @v to (@v + 1) with relaxed ordering.
489 *
490 * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
491 *
492 * Return: Nothing.
493 */
494 static __always_inline void
495 raw_atomic_long_inc(atomic_long_t *v)
496 {
497 #ifdef CONFIG_64BIT
498 raw_atomic64_inc(v);
499 #else
500 raw_atomic_inc(v);
501 #endif
502 }
503
504 /**
505 * raw_atomic_long_inc_return() - atomic increment with full ordering
506 * @v: pointer to atomic_long_t
507 *
508 * Atomically updates @v to (@v + 1) with full ordering.
509 *
510 * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
511 *
512 * Return: The updated value of @v.
513 */
514 static __always_inline long
515 raw_atomic_long_inc_return(atomic_long_t *v)
516 {
517 #ifdef CONFIG_64BIT
518 return raw_atomic64_inc_return(v);
519 #else
520 return raw_atomic_inc_return(v);
521 #endif
522 }
523
524 /**
525 * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
526 * @v: pointer to atomic_long_t
527 *
528 * Atomically updates @v to (@v + 1) with acquire ordering.
529 *
530 * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
531 *
532 * Return: The updated value of @v.
533 */
534 static __always_inline long
535 raw_atomic_long_inc_return_acquire(atomic_long_t *v)
536 {
537 #ifdef CONFIG_64BIT
538 return raw_atomic64_inc_return_acquire(v);
539 #else
540 return raw_atomic_inc_return_acquire(v);
541 #endif
542 }
543
544 /**
545 * raw_atomic_long_inc_return_release() - atomic increment with release ordering
546 * @v: pointer to atomic_long_t
547 *
548 * Atomically updates @v to (@v + 1) with release ordering.
549 *
550 * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
551 *
552 * Return: The updated value of @v.
553 */
554 static __always_inline long
555 raw_atomic_long_inc_return_release(atomic_long_t *v)
556 {
557 #ifdef CONFIG_64BIT
558 return raw_atomic64_inc_return_release(v);
559 #else
560 return raw_atomic_inc_return_release(v);
561 #endif
562 }
563
564 /**
565 * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
566 * @v: pointer to atomic_long_t
567 *
568 * Atomically updates @v to (@v + 1) with relaxed ordering.
569 *
570 * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
571 *
572 * Return: The updated value of @v.
573 */
574 static __always_inline long
575 raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
576 {
577 #ifdef CONFIG_64BIT
578 return raw_atomic64_inc_return_relaxed(v);
579 #else
580 return raw_atomic_inc_return_relaxed(v);
581 #endif
582 }
583
584 /**
585 * raw_atomic_long_fetch_inc() - atomic increment with full ordering
586 * @v: pointer to atomic_long_t
587 *
588 * Atomically updates @v to (@v + 1) with full ordering.
589 *
590 * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
591 *
592 * Return: The original value of @v.
593 */
594 static __always_inline long
595 raw_atomic_long_fetch_inc(atomic_long_t *v)
596 {
597 #ifdef CONFIG_64BIT
598 return raw_atomic64_fetch_inc(v);
599 #else
600 return raw_atomic_fetch_inc(v);
601 #endif
602 }
603
604 /**
605 * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
606 * @v: pointer to atomic_long_t
607 *
608 * Atomically updates @v to (@v + 1) with acquire ordering.
609 *
610 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
611 *
612 * Return: The original value of @v.
613 */
614 static __always_inline long
615 raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
616 {
617 #ifdef CONFIG_64BIT
618 return raw_atomic64_fetch_inc_acquire(v);
619 #else
620 return raw_atomic_fetch_inc_acquire(v);
621 #endif
622 }
623
624 /**
625 * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
626 * @v: pointer to atomic_long_t
627 *
628 * Atomically updates @v to (@v + 1) with release ordering.
629 *
630 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
631 *
632 * Return: The original value of @v.
633 */
634 static __always_inline long
635 raw_atomic_long_fetch_inc_release(atomic_long_t *v)
636 {
637 #ifdef CONFIG_64BIT
638 return raw_atomic64_fetch_inc_release(v);
639 #else
640 return raw_atomic_fetch_inc_release(v);
641 #endif
642 }
643
644 /**
645 * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
646 * @v: pointer to atomic_long_t
647 *
648 * Atomically updates @v to (@v + 1) with relaxed ordering.
649 *
650 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
651 *
652 * Return: The original value of @v.
653 */
654 static __always_inline long
655 raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
656 {
657 #ifdef CONFIG_64BIT
658 return raw_atomic64_fetch_inc_relaxed(v);
659 #else
660 return raw_atomic_fetch_inc_relaxed(v);
661 #endif
662 }
663
664 /**
665 * raw_atomic_long_dec() - atomic decrement with relaxed ordering
666 * @v: pointer to atomic_long_t
667 *
668 * Atomically updates @v to (@v - 1) with relaxed ordering.
669 *
670 * Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.
671 *
672 * Return: Nothing.
673 */
674 static __always_inline void
675 raw_atomic_long_dec(atomic_long_t *v)
676 {
677 #ifdef CONFIG_64BIT
678 raw_atomic64_dec(v);
679 #else
680 raw_atomic_dec(v);
681 #endif
682 }
683
684 /**
685 * raw_atomic_long_dec_return() - atomic decrement with full ordering
686 * @v: pointer to atomic_long_t
687 *
688 * Atomically updates @v to (@v - 1) with full ordering.
689 *
690 * Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.
691 *
692 * Return: The updated value of @v.
693 */
694 static __always_inline long
695 raw_atomic_long_dec_return(atomic_long_t *v)
696 {
697 #ifdef CONFIG_64BIT
698 return raw_atomic64_dec_return(v);
699 #else
700 return raw_atomic_dec_return(v);
701 #endif
702 }
703
704 /**
705 * raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
706 * @v: pointer to atomic_long_t
707 *
708 * Atomically updates @v to (@v - 1) with acquire ordering.
709 *
710 * Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.
711 *
712 * Return: The updated value of @v.
713 */
714 static __always_inline long
715 raw_atomic_long_dec_return_acquire(atomic_long_t *v)
716 {
717 #ifdef CONFIG_64BIT
718 return raw_atomic64_dec_return_acquire(v);
719 #else
720 return raw_atomic_dec_return_acquire(v);
721 #endif
722 }
723
724 /**
725 * raw_atomic_long_dec_return_release() - atomic decrement with release ordering
726 * @v: pointer to atomic_long_t
727 *
728 * Atomically updates @v to (@v - 1) with release ordering.
729 *
730 * Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.
731 *
732 * Return: The updated value of @v.
733 */
734 static __always_inline long
735 raw_atomic_long_dec_return_release(atomic_long_t *v)
736 {
737 #ifdef CONFIG_64BIT
738 return raw_atomic64_dec_return_release(v);
739 #else
740 return raw_atomic_dec_return_release(v);
741 #endif
742 }
743
744 /**
745 * raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
746 * @v: pointer to atomic_long_t
747 *
748 * Atomically updates @v to (@v - 1) with relaxed ordering.
749 *
750 * Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.
751 *
752 * Return: The updated value of @v.
753 */
754 static __always_inline long
755 raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
756 {
757 #ifdef CONFIG_64BIT
758 return raw_atomic64_dec_return_relaxed(v);
759 #else
760 return raw_atomic_dec_return_relaxed(v);
761 #endif
762 }
763
764 /**
765 * raw_atomic_long_fetch_dec() - atomic decrement with full ordering
766 * @v: pointer to atomic_long_t
767 *
768 * Atomically updates @v to (@v - 1) with full ordering.
769 *
770 * Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.
771 *
772 * Return: The original value of @v.
773 */
774 static __always_inline long
775 raw_atomic_long_fetch_dec(atomic_long_t *v)
776 {
777 #ifdef CONFIG_64BIT
778 return raw_atomic64_fetch_dec(v);
779 #else
780 return raw_atomic_fetch_dec(v);
781 #endif
782 }
783
784 /**
785 * raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
786 * @v: pointer to atomic_long_t
787 *
788 * Atomically updates @v to (@v - 1) with acquire ordering.
789 *
790 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.
791 *
792 * Return: The original value of @v.
793 */
794 static __always_inline long
795 raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
796 {
797 #ifdef CONFIG_64BIT
798 return raw_atomic64_fetch_dec_acquire(v);
799 #else
800 return raw_atomic_fetch_dec_acquire(v);
801 #endif
802 }
803
804 /**
805 * raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering
806 * @v: pointer to atomic_long_t
807 *
808 * Atomically updates @v to (@v - 1) with release ordering.
809 *
810 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.
811 *
812 * Return: The original value of @v.
813 */
814 static __always_inline long
815 raw_atomic_long_fetch_dec_release(atomic_long_t *v)
816 {
817 #ifdef CONFIG_64BIT
818 return raw_atomic64_fetch_dec_release(v);
819 #else
820 return raw_atomic_fetch_dec_release(v);
821 #endif
822 }
823
824 /**
825 * raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
826 * @v: pointer to atomic_long_t
827 *
828 * Atomically updates @v to (@v - 1) with relaxed ordering.
829 *
830 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.
831 *
832 * Return: The original value of @v.
833 */
834 static __always_inline long
835 raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
836 {
837 #ifdef CONFIG_64BIT
838 return raw_atomic64_fetch_dec_relaxed(v);
839 #else
840 return raw_atomic_fetch_dec_relaxed(v);
841 #endif
842 }
843
844 /**
845 * raw_atomic_long_and() - atomic bitwise AND with relaxed ordering
846 * @i: long value
847 * @v: pointer to atomic_long_t
848 *
849 * Atomically updates @v to (@v & @i) with relaxed ordering.
850 *
851 * Safe to use in noinstr code; prefer atomic_long_and() elsewhere.
852 *
853 * Return: Nothing.
854 */
855 static __always_inline void
856 raw_atomic_long_and(long i, atomic_long_t *v)
857 {
858 #ifdef CONFIG_64BIT
859 raw_atomic64_and(i, v);
860 #else
861 raw_atomic_and(i, v);
862 #endif
863 }
864
865 /**
866 * raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering
867 * @i: long value
868 * @v: pointer to atomic_long_t
869 *
870 * Atomically updates @v to (@v & @i) with full ordering.
871 *
872 * Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.
873 *
874 * Return: The original value of @v.
875 */
876 static __always_inline long
877 raw_atomic_long_fetch_and(long i, atomic_long_t *v)
878 {
879 #ifdef CONFIG_64BIT
880 return raw_atomic64_fetch_and(i, v);
881 #else
882 return raw_atomic_fetch_and(i, v);
883 #endif
884 }
885
886 /**
887 * raw_atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
888 * @i: long value
889 * @v: pointer to atomic_long_t
890 *
891 * Atomically updates @v to (@v & @i) with acquire ordering.
892 *
893 * Safe to use in noinstr code; prefer atomic_long_fetch_and_acquire() elsewhere.
894 *
895 * Return: The original value of @v.
896 */
897 static __always_inline long
898 raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
899 {
900 #ifdef CONFIG_64BIT
901 return raw_atomic64_fetch_and_acquire(i, v);
902 #else
903 return raw_atomic_fetch_and_acquire(i, v);
904 #endif
905 }
906
907 /**
908 * raw_atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
909 * @i: long value
910 * @v: pointer to atomic_long_t
911 *
912 * Atomically updates @v to (@v & @i) with release ordering.
913 *
914 * Safe to use in noinstr code; prefer atomic_long_fetch_and_release() elsewhere.
915 *
916 * Return: The original value of @v.
917 */
918 static __always_inline long
919 raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
920 {
921 #ifdef CONFIG_64BIT
922 return raw_atomic64_fetch_and_release(i, v);
923 #else
924 return raw_atomic_fetch_and_release(i, v);
925 #endif
926 }
927
928 /**
929 * raw_atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
930 * @i: long value
931 * @v: pointer to atomic_long_t
932 *
933 * Atomically updates @v to (@v & @i) with relaxed ordering.
934 *
935 * Safe to use in noinstr code; prefer atomic_long_fetch_and_relaxed() elsewhere.
936 *
937 * Return: The original value of @v.
938 */
939 static __always_inline long
940 raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
941 {
942 #ifdef CONFIG_64BIT
943 return raw_atomic64_fetch_and_relaxed(i, v);
944 #else
945 return raw_atomic_fetch_and_relaxed(i, v);
946 #endif
947 }
948
949 /**
950 * raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
951 * @i: long value
952 * @v: pointer to atomic_long_t
953 *
954 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
955 *
956 * Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
957 *
958 * Return: Nothing.
959 */
960 static __always_inline void
961 raw_atomic_long_andnot(long i, atomic_long_t *v)
962 {
963 #ifdef CONFIG_64BIT
964 raw_atomic64_andnot(i, v);
965 #else
966 raw_atomic_andnot(i, v);
967 #endif
968 }
969
970 /**
971 * raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
972 * @i: long value
973 * @v: pointer to atomic_long_t
974 *
975 * Atomically updates @v to (@v & ~@i) with full ordering.
976 *
977 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
978 *
979 * Return: The original value of @v.
980 */
981 static __always_inline long
982 raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
983 {
984 #ifdef CONFIG_64BIT
985 return raw_atomic64_fetch_andnot(i, v);
986 #else
987 return raw_atomic_fetch_andnot(i, v);
988 #endif
989 }
990
991 /**
992 * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
993 * @i: long value
994 * @v: pointer to atomic_long_t
995 *
996 * Atomically updates @v to (@v & ~@i) with acquire ordering.
997 *
998 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
999 *
1000 * Return: The original value of @v.
1001 */
1002 static __always_inline long
1003 raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
1004 {
1005 #ifdef CONFIG_64BIT
1006 return raw_atomic64_fetch_andnot_acquire(i, v);
1007 #else
1008 return raw_atomic_fetch_andnot_acquire(i, v);
1009 #endif
1010 }
1011
1012 /**
1013 * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
1014 * @i: long value
1015 * @v: pointer to atomic_long_t
1016 *
1017 * Atomically updates @v to (@v & ~@i) with release ordering.
1018 *
1019 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
1020 *
1021 * Return: The original value of @v.
1022 */
1023 static __always_inline long
1024 raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
1025 {
1026 #ifdef CONFIG_64BIT
1027 return raw_atomic64_fetch_andnot_release(i, v);
1028 #else
1029 return raw_atomic_fetch_andnot_release(i, v);
1030 #endif
1031 }
1032
1033 /**
1034 * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
1035 * @i: long value
1036 * @v: pointer to atomic_long_t
1037 *
1038 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
1039 *
1040 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
1041 *
1042 * Return: The original value of @v.
1043 */
1044 static __always_inline long
1045 raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
1046 {
1047 #ifdef CONFIG_64BIT
1048 return raw_atomic64_fetch_andnot_relaxed(i, v);
1049 #else
1050 return raw_atomic_fetch_andnot_relaxed(i, v);
1051 #endif
1052 }
1053
1054 /**
1055 * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
1056 * @i: long value
1057 * @v: pointer to atomic_long_t
1058 *
1059 * Atomically updates @v to (@v | @i) with relaxed ordering.
1060 *
1061 * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
1062 *
1063 * Return: Nothing.
1064 */
1065 static __always_inline void
1066 raw_atomic_long_or(long i, atomic_long_t *v)
1067 {
1068 #ifdef CONFIG_64BIT
1069 raw_atomic64_or(i, v);
1070 #else
1071 raw_atomic_or(i, v);
1072 #endif
1073 }
1074
1075 /**
1076 * raw_atomic_long_fetch_or() - atomic bitwise OR with full ordering
1077 * @i: long value
1078 * @v: pointer to atomic_long_t
1079 *
1080 * Atomically updates @v to (@v | @i) with full ordering.
1081 *
1082 * Safe to use in noinstr code; prefer atomic_long_fetch_or() elsewhere.
1083 *
1084 * Return: The original value of @v.
1085 */
1086 static __always_inline long
1087 raw_atomic_long_fetch_or(long i, atomic_long_t *v)
1088 {
1089 #ifdef CONFIG_64BIT
1090 return raw_atomic64_fetch_or(i, v);
1091 #else
1092 return raw_atomic_fetch_or(i, v);
1093 #endif
1094 }
1095
1096 /**
1097 * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
1098 * @i: long value
1099 * @v: pointer to atomic_long_t
1100 *
1101 * Atomically updates @v to (@v | @i) with acquire ordering.
1102 *
1103 * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
1104 *
1105 * Return: The original value of @v.
1106 */
1107 static __always_inline long
1108 raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
1109 {
1110 #ifdef CONFIG_64BIT
1111 return raw_atomic64_fetch_or_acquire(i, v);
1112 #else
1113 return raw_atomic_fetch_or_acquire(i, v);
1114 #endif
1115 }
1116
1117 /**
1118 * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
1119 * @i: long value
1120 * @v: pointer to atomic_long_t
1121 *
1122 * Atomically updates @v to (@v | @i) with release ordering.
1123 *
1124 * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
1125 *
1126 * Return: The original value of @v.
1127 */
1128 static __always_inline long
1129 raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
1130 {
1131 #ifdef CONFIG_64BIT
1132 return raw_atomic64_fetch_or_release(i, v);
1133 #else
1134 return raw_atomic_fetch_or_release(i, v);
1135 #endif
1136 }
1137
1138 /**
1139 * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
1140 * @i: long value
1141 * @v: pointer to atomic_long_t
1142 *
1143 * Atomically updates @v to (@v | @i) with relaxed ordering.
1144 *
1145 * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
1146 *
1147 * Return: The original value of @v.
1148 */
1149 static __always_inline long
1150 raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
1151 {
1152 #ifdef CONFIG_64BIT
1153 return raw_atomic64_fetch_or_relaxed(i, v);
1154 #else
1155 return raw_atomic_fetch_or_relaxed(i, v);
1156 #endif
1157 }
1158
1159 /**
1160 * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
1161 * @i: long value
1162 * @v: pointer to atomic_long_t
1163 *
1164 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1165 *
1166 * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
1167 *
1168 * Return: Nothing.
1169 */
1170 static __always_inline void
1171 raw_atomic_long_xor(long i, atomic_long_t *v)
1172 {
1173 #ifdef CONFIG_64BIT
1174 raw_atomic64_xor(i, v);
1175 #else
1176 raw_atomic_xor(i, v);
1177 #endif
1178 }
1179
1180 /**
1181 * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
1182 * @i: long value
1183 * @v: pointer to atomic_long_t
1184 *
1185 * Atomically updates @v to (@v ^ @i) with full ordering.
1186 *
1187 * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
1188 *
1189 * Return: The original value of @v.
1190 */
1191 static __always_inline long
1192 raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
1193 {
1194 #ifdef CONFIG_64BIT
1195 return raw_atomic64_fetch_xor(i, v);
1196 #else
1197 return raw_atomic_fetch_xor(i, v);
1198 #endif
1199 }
1200
1201 /**
1202 * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
1203 * @i: long value
1204 * @v: pointer to atomic_long_t
1205 *
1206 * Atomically updates @v to (@v ^ @i) with acquire ordering.
1207 *
1208 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
1209 *
1210 * Return: The original value of @v.
1211 */
1212 static __always_inline long
1213 raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
1214 {
1215 #ifdef CONFIG_64BIT
1216 return raw_atomic64_fetch_xor_acquire(i, v);
1217 #else
1218 return raw_atomic_fetch_xor_acquire(i, v);
1219 #endif
1220 }
1221
1222 /**
1223 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
1224 * @i: long value
1225 * @v: pointer to atomic_long_t
1226 *
1227 * Atomically updates @v to (@v ^ @i) with release ordering.
1228 *
1229 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
1230 *
1231 * Return: The original value of @v.
1232 */
1233 static __always_inline long
1234 raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
1235 {
1236 #ifdef CONFIG_64BIT
1237 return raw_atomic64_fetch_xor_release(i, v);
1238 #else
1239 return raw_atomic_fetch_xor_release(i, v);
1240 #endif
1241 }
1242
1243 /**
1244 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
1245 * @i: long value
1246 * @v: pointer to atomic_long_t
1247 *
1248 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1249 *
1250 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
1251 *
1252 * Return: The original value of @v.
1253 */
1254 static __always_inline long
1255 raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
1256 {
1257 #ifdef CONFIG_64BIT
1258 return raw_atomic64_fetch_xor_relaxed(i, v);
1259 #else
1260 return raw_atomic_fetch_xor_relaxed(i, v);
1261 #endif
1262 }
1263
1264 /**
1265 * raw_atomic_long_xchg() - atomic exchange with full ordering
1266 * @v: pointer to atomic_long_t
1267 * @new: long value to assign
1268 *
1269 * Atomically updates @v to @new with full ordering.
1270 *
1271 * Safe to use in noinstr code; prefer atomic_long_xchg() elsewhere.
1272 *
1273 * Return: The original value of @v.
1274 */
1275 static __always_inline long
1276 raw_atomic_long_xchg(atomic_long_t *v, long new)
1277 {
1278 #ifdef CONFIG_64BIT
1279 return raw_atomic64_xchg(v, new);
1280 #else
1281 return raw_atomic_xchg(v, new);
1282 #endif
1283 }
1284
1285 /**
1286 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
1287 * @v: pointer to atomic_long_t
1288 * @new: long value to assign
1289 *
1290 * Atomically updates @v to @new with acquire ordering.
1291 *
1292 * Safe to use in noinstr code; prefer atomic_long_xchg_acquire() elsewhere.
1293 *
1294 * Return: The original value of @v.
1295 */
1296 static __always_inline long
1297 raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
1298 {
1299 #ifdef CONFIG_64BIT
1300 return raw_atomic64_xchg_acquire(v, new);
1301 #else
1302 return raw_atomic_xchg_acquire(v, new);
1303 #endif
1304 }
1305
1306 /**
1307 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
1308 * @v: pointer to atomic_long_t
1309 * @new: long value to assign
1310 *
1311 * Atomically updates @v to @new with release ordering.
1312 *
1313 * Safe to use in noinstr code; prefer atomic_long_xchg_release() elsewhere.
1314 *
1315 * Return: The original value of @v.
1316 */
1317 static __always_inline long
1318 raw_atomic_long_xchg_release(atomic_long_t *v, long new)
1319 {
1320 #ifdef CONFIG_64BIT
1321 return raw_atomic64_xchg_release(v, new);
1322 #else
1323 return raw_atomic_xchg_release(v, new);
1324 #endif
1325 }
1326
1327 /**
1328 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
1329 * @v: pointer to atomic_long_t
1330 * @new: long value to assign
1331 *
1332 * Atomically updates @v to @new with relaxed ordering.
1333 *
1334 * Safe to use in noinstr code; prefer atomic_long_xchg_relaxed() elsewhere.
1335 *
1336 * Return: The original value of @v.
1337 */
1338 static __always_inline long
1339 raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
1340 {
1341 #ifdef CONFIG_64BIT
1342 return raw_atomic64_xchg_relaxed(v, new);
1343 #else
1344 return raw_atomic_xchg_relaxed(v, new);
1345 #endif
1346 }
1347
1348 /**
1349 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
1350 * @v: pointer to atomic_long_t
1351 * @old: long value to compare with
1352 * @new: long value to assign
1353 *
1354 * If (@v == @old), atomically updates @v to @new with full ordering.
1355 * Otherwise, @v is not modified and relaxed ordering is provided.
1356 *
1357 * Safe to use in noinstr code; prefer atomic_long_cmpxchg() elsewhere.
1358 *
1359 * Return: The original value of @v.
1360 */
1361 static __always_inline long
1362 raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
1363 {
1364 #ifdef CONFIG_64BIT
1365 return raw_atomic64_cmpxchg(v, old, new);
1366 #else
1367 return raw_atomic_cmpxchg(v, old, new);
1368 #endif
1369 }
1370
1371 /**
1372 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1373 * @v: pointer to atomic_long_t
1374 * @old: long value to compare with
1375 * @new: long value to assign
1376 *
1377 * If (@v == @old), atomically updates @v to @new with acquire ordering.
1378 * Otherwise, @v is not modified and relaxed ordering is provided.
1379 *
1380 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_acquire() elsewhere.
1381 *
1382 * Return: The original value of @v.
1383 */
1384 static __always_inline long
1385 raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
1386 {
1387 #ifdef CONFIG_64BIT
1388 return raw_atomic64_cmpxchg_acquire(v, old, new);
1389 #else
1390 return raw_atomic_cmpxchg_acquire(v, old, new);
1391 #endif
1392 }
1393
1394 /**
1395 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
1396 * @v: pointer to atomic_long_t
1397 * @old: long value to compare with
1398 * @new: long value to assign
1399 *
1400 * If (@v == @old), atomically updates @v to @new with release ordering.
1401 * Otherwise, @v is not modified and relaxed ordering is provided.
1402 *
1403 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_release() elsewhere.
1404 *
1405 * Return: The original value of @v.
1406 */
1407 static __always_inline long
1408 raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
1409 {
1410 #ifdef CONFIG_64BIT
1411 return raw_atomic64_cmpxchg_release(v, old, new);
1412 #else
1413 return raw_atomic_cmpxchg_release(v, old, new);
1414 #endif
1415 }
1416
1417 /**
1418 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1419 * @v: pointer to atomic_long_t
1420 * @old: long value to compare with
1421 * @new: long value to assign
1422 *
1423 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1424 * Otherwise, @v is not modified and relaxed ordering is provided.
1425 *
1426 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_relaxed() elsewhere.
1427 *
1428 * Return: The original value of @v.
1429 */
1430 static __always_inline long
1431 raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
1432 {
1433 #ifdef CONFIG_64BIT
1434 return raw_atomic64_cmpxchg_relaxed(v, old, new);
1435 #else
1436 return raw_atomic_cmpxchg_relaxed(v, old, new);
1437 #endif
1438 }
1439
1440 /**
1441 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
1442 * @v: pointer to atomic_long_t
1443 * @old: pointer to long value to compare with
1444 * @new: long value to assign
1445 *
1446 * If (@v == @old), atomically updates @v to @new with full ordering.
1447 * Otherwise, @v is not modified, @old is updated to the current value of @v,
1448 * and relaxed ordering is provided.
1449 *
1450 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
1451 *
1452 * Return: @true if the exchange occurred, @false otherwise.
1453 */
1454 static __always_inline bool
1455 raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
1456 {
1457 #ifdef CONFIG_64BIT
1458 return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
1459 #else
1460 return raw_atomic_try_cmpxchg(v, (int *)old, new);
1461 #endif
1462 }
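/*
 * Illustrative sketch of the usual try_cmpxchg() retry loop (hypothetical
 * function and variable names): on failure @old is refreshed with the current
 * value of @v, so the loop does not need to re-read it explicitly.
 *
 *	static inline void example_set_bit0(atomic_long_t *v)
 *	{
 *		long old = raw_atomic_long_read(v);
 *
 *		do {
 *			// keep retrying until no other CPU updated @v under us
 *		} while (!raw_atomic_long_try_cmpxchg(v, &old, old | 1));
 *	}
 */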
1463
1464 /**
1465 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1466 * @v: pointer to atomic_long_t
1467 * @old: pointer to long value to compare with
1468 * @new: long value to assign
1469 *
1470 * If (@v == @old), atomically updates @v to @new with acquire ordering.
1471 * Otherwise, @v is not modified, @old is updated to the current value of @v,
1472 * and relaxed ordering is provided.
1473 *
1474 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
1475 *
1476 * Return: @true if the exchange occurred, @false otherwise.
1477 */
1478 static __always_inline bool
1479 raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
1480 {
1481 #ifdef CONFIG_64BIT
1482 return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
1483 #else
1484 return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
1485 #endif
1486 }
1487
1488 /**
1489 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
1490 * @v: pointer to atomic_long_t
1491 * @old: pointer to long value to compare with
1492 * @new: long value to assign
1493 *
1494 * If (@v == @old), atomically updates @v to @new with release ordering.
1495 * Otherwise, @v is not modified, @old is updated to the current value of @v,
1496 * and relaxed ordering is provided.
1497 *
1498 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
1499 *
1500 * Return: @true if the exchange occurred, @false otherwise.
1501 */
1502 static __always_inline bool
1503 raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
1504 {
1505 #ifdef CONFIG_64BIT
1506 return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
1507 #else
1508 return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
1509 #endif
1510 }
1511
1512 /**
1513 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1514 * @v: pointer to atomic_long_t
1515 * @old: pointer to long value to compare with
1516 * @new: long value to assign
1517 *
1518 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1519 * Otherwise, @v is not modified, @old is updated to the current value of @v,
1520 * and relaxed ordering is provided.
1521 *
1522 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
1523 *
1524 * Return: @true if the exchange occurred, @false otherwise.
1525 */
1526 static __always_inline bool
1527 raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
1528 {
1529 #ifdef CONFIG_64BIT
1530 return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
1531 #else
1532 return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
1533 #endif
1534 }
1535
1536 /**
1537 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
1538 * @i: long value to subtract
1539 * @v: pointer to atomic_long_t
1540 *
1541 * Atomically updates @v to (@v - @i) with full ordering.
1542 *
1543 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
1544 *
1545 * Return: @true if the resulting value of @v is zero, @false otherwise.
1546 */
1547 static __always_inline bool
1548 raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
1549 {
1550 #ifdef CONFIG_64BIT
1551 return raw_atomic64_sub_and_test(i, v);
1552 #else
1553 return raw_atomic_sub_and_test(i, v);
1554 #endif
1555 }
1556
1557 /**
1558 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
1559 * @v: pointer to atomic_long_t
1560 *
1561 * Atomically updates @v to (@v - 1) with full ordering.
1562 *
1563 * Safe to use in noinstr code; prefer atomic_long_dec_and_test() elsewhere.
1564 *
1565 * Return: @true if the resulting value of @v is zero, @false otherwise.
1566 */
1567 static __always_inline bool
1568 raw_atomic_long_dec_and_test(atomic_long_t *v)
1569 {
1570 #ifdef CONFIG_64BIT
1571 return raw_atomic64_dec_and_test(v);
1572 #else
1573 return raw_atomic_dec_and_test(v);
1574 #endif
1575 }
1576
1577 /**
1578 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
1579 * @v: pointer to atomic_long_t
1580 *
1581 * Atomically updates @v to (@v + 1) with full ordering.
1582 *
1583 * Safe to use in noinstr code; prefer atomic_long_inc_and_test() elsewhere.
1584 *
1585 * Return: @true if the resulting value of @v is zero, @false otherwise.
1586 */
1587 static __always_inline bool
1588 raw_atomic_long_inc_and_test(atomic_long_t *v)
1589 {
1590 #ifdef CONFIG_64BIT
1591 return raw_atomic64_inc_and_test(v);
1592 #else
1593 return raw_atomic_inc_and_test(v);
1594 #endif
1595 }
1596
1597 /**
1598 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
1599 * @i: long value to add
1600 * @v: pointer to atomic_long_t
1601 *
1602 * Atomically updates @v to (@v + @i) with full ordering.
1603 *
1604 * Safe to use in noinstr code; prefer atomic_long_add_negative() elsewhere.
1605 *
1606 * Return: @true if the resulting value of @v is negative, @false otherwise.
1607 */
1608 static __always_inline bool
1609 raw_atomic_long_add_negative(long i, atomic_long_t *v)
1610 {
1611 #ifdef CONFIG_64BIT
1612 return raw_atomic64_add_negative(i, v);
1613 #else
1614 return raw_atomic_add_negative(i, v);
1615 #endif
1616 }
1617
1618 /**
1619 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
1620 * @i: long value to add
1621 * @v: pointer to atomic_long_t
1622 *
1623 * Atomically updates @v to (@v + @i) with acquire ordering.
1624 *
1625 * Safe to use in noinstr code; prefer atomic_long_add_negative_acquire() elsewhere.
1626 *
1627 * Return: @true if the resulting value of @v is negative, @false otherwise.
1628 */
1629 static __always_inline bool
1630 raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
1631 {
1632 #ifdef CONFIG_64BIT
1633 return raw_atomic64_add_negative_acquire(i, v);
1634 #else
1635 return raw_atomic_add_negative_acquire(i, v);
1636 #endif
1637 }
1638
1639 /**
1640 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
1641 * @i: long value to add
1642 * @v: pointer to atomic_long_t
1643 *
1644 * Atomically updates @v to (@v + @i) with release ordering.
1645 *
1646 * Safe to use in noinstr code; prefer atomic_long_add_negative_release() elsewhere.
1647 *
1648 * Return: @true if the resulting value of @v is negative, @false otherwise.
1649 */
1650 static __always_inline bool
1651 raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
1652 {
1653 #ifdef CONFIG_64BIT
1654 return raw_atomic64_add_negative_release(i, v);
1655 #else
1656 return raw_atomic_add_negative_release(i, v);
1657 #endif
1658 }
1659
1660 /**
1661 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
1662 * @i: long value to add
1663 * @v: pointer to atomic_long_t
1664 *
1665 * Atomically updates @v to (@v + @i) with relaxed ordering.
1666 *
1667 * Safe to use in noinstr code; prefer atomic_long_add_negative_relaxed() elsewhere.
1668 *
1669 * Return: @true if the resulting value of @v is negative, @false otherwise.
1670 */
1671 static __always_inline bool
1672 raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
1673 {
1674 #ifdef CONFIG_64BIT
1675 return raw_atomic64_add_negative_relaxed(i, v);
1676 #else
1677 return raw_atomic_add_negative_relaxed(i, v);
1678 #endif
1679 }
1680
1681 /**
1682 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
1683 * @v: pointer to atomic_long_t
1684 * @a: long value to add
1685 * @u: long value to compare with
1686 *
1687 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1688 * Otherwise, @v is not modified and relaxed ordering is provided.
1689 *
1690 * Safe to use in noinstr code; prefer atomic_long_fetch_add_unless() elsewhere.
1691 *
1692 * Return: The original value of @v.
1693 */
1694 static __always_inline long
1695 raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
1696 {
1697 #ifdef CONFIG_64BIT
1698 return raw_atomic64_fetch_add_unless(v, a, u);
1699 #else
1700 return raw_atomic_fetch_add_unless(v, a, u);
1701 #endif
1702 }
1703
1704 /**
1705 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
1706 * @v: pointer to atomic_long_t
1707 * @a: long value to add
1708 * @u: long value to compare with
1709 *
1710 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1711 * Otherwise, @v is not modified and relaxed ordering is provided.
1712 *
1713 * Safe to use in noinstr code; prefer atomic_long_add_unless() elsewhere.
1714 *
1715 * Return: @true if @v was updated, @false otherwise.
1716 */
1717 static __always_inline bool
1718 raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
1719 {
1720 #ifdef CONFIG_64BIT
1721 return raw_atomic64_add_unless(v, a, u);
1722 #else
1723 return raw_atomic_add_unless(v, a, u);
1724 #endif
1725 }
1726
1727 /**
1728 * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
1729 * @v: pointer to atomic_long_t
1730 *
1731 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
1732 * Otherwise, @v is not modified and relaxed ordering is provided.
1733 *
1734 * Safe to use in noinstr code; prefer atomic_long_inc_not_zero() elsewhere.
1735 *
1736 * Return: @true if @v was updated, @false otherwise.
1737 */
1738 static __always_inline bool
1739 raw_atomic_long_inc_not_zero(atomic_long_t *v)
1740 {
1741 #ifdef CONFIG_64BIT
1742 return raw_atomic64_inc_not_zero(v);
1743 #else
1744 return raw_atomic_inc_not_zero(v);
1745 #endif
1746 }
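/*
 * Illustrative use of inc_not_zero() (a sketch with hypothetical names): a
 * lookup path may only take a reference while the object is still live, i.e.
 * while its reference count has not already dropped to zero.
 *
 *	if (!raw_atomic_long_inc_not_zero(&obj->example_refs))
 *		return NULL;	// object is being torn down, do not touch it
 *	return obj;
 */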
1747
1748 /**
1749 * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
1750 * @v: pointer to atomic_long_t
1751 *
1752 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
1753 * Otherwise, @v is not modified and relaxed ordering is provided.
1754 *
1755 * Safe to use in noinstr code; prefer atomic_long_inc_unless_negative() elsewhere.
1756 *
1757 * Return: @true if @v was updated, @false otherwise.
1758 */
1759 static __always_inline bool
1760 raw_atomic_long_inc_unless_negative(atomic_long_t *v)
1761 {
1762 #ifdef CONFIG_64BIT
1763 return raw_atomic64_inc_unless_negative(v);
1764 #else
1765 return raw_atomic_inc_unless_negative(v);
1766 #endif
1767 }
1768
1769 /**
1770 * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
1771 * @v: pointer to atomic_long_t
1772 *
1773 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
1774 * Otherwise, @v is not modified and relaxed ordering is provided.
1775 *
1776 * Safe to use in noinstr code; prefer atomic_long_dec_unless_positive() elsewhere.
1777 *
1778 * Return: @true if @v was updated, @false otherwise.
1779 */
1780 static __always_inline bool
1781 raw_atomic_long_dec_unless_positive(atomic_long_t *v)
1782 {
1783 #ifdef CONFIG_64BIT
1784 return raw_atomic64_dec_unless_positive(v);
1785 #else
1786 return raw_atomic_dec_unless_positive(v);
1787 #endif
1788 }
1789
1790 /**
1791 * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
1792 * @v: pointer to atomic_long_t
1793 *
1794 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
1795 * Otherwise, @v is not modified and relaxed ordering is provided.
1796 *
1797 * Safe to use in noinstr code; prefer atomic_long_dec_if_positive() elsewhere.
1798 *
1799 * Return: The old value of (@v - 1), regardless of whether @v was updated.
1800 */
1801 static __always_inline long
1802 raw_atomic_long_dec_if_positive(atomic_long_t *v)
1803 {
1804 #ifdef CONFIG_64BIT
1805 return raw_atomic64_dec_if_positive(v);
1806 #else
1807 return raw_atomic_dec_if_positive(v);
1808 #endif
1809 }
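/*
 * Illustrative use of dec_if_positive() (a sketch with hypothetical names):
 * the return value is the decremented value whether or not the store took
 * place, so a negative result means no "token" was available and @v was left
 * unchanged.
 *
 *	if (raw_atomic_long_dec_if_positive(&example_tokens) < 0)
 *		return -EAGAIN;	// nothing left to consume
 */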
1810
1811 #endif /* _LINUX_ATOMIC_LONG_H */
1812 // eadf183c3600b8b92b91839dd3be6bcc560c752d
1813