/*
 * Copyright (c) 2016 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

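/*
 * Linux-compatible atomic_long_t and its operations, implemented on top of
 * the FreeBSD machine-dependent atomic(9) primitives.
 */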
#ifndef _ASM_ATOMIC_LONG_H_
#define _ASM_ATOMIC_LONG_H_

#include <linux/compiler.h>
#include <sys/types.h>
#include <machine/atomic.h>

typedef struct {
	volatile long counter;
} atomic_long_t;

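/* Set the counter to i; WRITE_ONCE() keeps the store a single access. */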
static inline void
atomic_long_set(atomic_long_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}

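/* Atomically replace the counter with val and return its previous value. */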
static inline long
atomic_long_xchg(atomic_long_t *v, long val)
{
	return atomic_swap_long(&v->counter, val);
}

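/*
 * Compare-and-exchange: if the counter equals old, set it to new.  Returns
 * the value observed in the counter (equal to old on success).  The loop
 * retries because atomic_fcmpset_long() may fail spuriously on some
 * architectures; a genuine mismatch is detected when the observed value
 * written back into ret differs from old.
 */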
static inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	long ret = old;

	for (;;) {
		if (atomic_fcmpset_long(&v->counter, &ret, new))
			break;
		if (ret != old)
			break;
	}
	return (ret);
}

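/*
 * Add a to the counter unless its current value is u.  Returns non-zero
 * if the addition was performed.
 */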
static inline int
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	long c = READ_ONCE(v->counter);

	for (;;) {
		if (unlikely(c == u))
			break;
		if (likely(atomic_fcmpset_long(&v->counter, &c, c + a)))
			break;
	}
	return (c != u);
}

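/*
 * Increment the counter unless it is currently zero.  Returns non-zero if
 * the increment was performed.
 */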
#define atomic_long_inc_not_zero(v)	atomic_long_add_unless((v), 1, 0)

#endif	/* _ASM_ATOMIC_LONG_H_ */